Repository: lens
Updated Branches:
  refs/heads/master 2539f338a -> 2f0e5fdbf


LENS-813: For multi-fact queries, having clauses are being added to both
sub-queries.


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/2f0e5fdb
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/2f0e5fdb
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/2f0e5fdb

Branch: refs/heads/master
Commit: 2f0e5fdbf079c301bcd41724e61b90b9af4fad33
Parents: 2539f33
Author: Rajat Khandelwal <pro...@apache.org>
Authored: Mon Feb 1 21:23:01 2016 +0530
Committer: Rajat Khandelwal <rajatgupt...@gmail.com>
Committed: Mon Feb 1 21:23:01 2016 +0530

----------------------------------------------------------------------
 .../lens/cube/error/LensCubeErrorCode.java      |   1 -
 .../apache/lens/cube/parse/AliasDecider.java    |  26 ++
 .../apache/lens/cube/parse/CandidateFact.java   |  72 ++++--
 .../lens/cube/parse/CubeQueryContext.java       |  36 ++-
 .../lens/cube/parse/DefaultAliasDecider.java    |  31 +++
 .../lens/cube/parse/ExpressionResolver.java     |   4 +-
 .../org/apache/lens/cube/parse/HQLParser.java   |  41 +++
 .../lens/cube/parse/MultiFactHQLContext.java    |  85 ++++++-
 .../parse/SingleFactMultiStorageHQLContext.java |  50 +---
 .../lens/cube/parse/TestBaseCubeQueries.java    | 249 +++++++++++++++++--
 10 files changed, 491 insertions(+), 104 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java 
b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 61d08b2..e49b58b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -57,7 +57,6 @@ public enum LensCubeErrorCode {
   STORAGE_UNION_DISABLED(3031, 1500),
   COULD_NOT_PARSE_EXPRESSION(3032, 1500),
   QUERIED_TABLE_NOT_FOUND(3033, 0),
-
   // Error codes greater than 3100 are errors while doing a metastore 
operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
   TIMELINE_ABSENT(3102, 100),

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
new file mode 100644
index 0000000..e9ddb9f
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasDecider.java
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+public interface AliasDecider {
+  String decideAlias(ASTNode node);
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index c305244..4faebe1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -29,6 +29,7 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
@@ -54,19 +55,26 @@ public class CandidateFact implements CandidateTable, 
QueryAST {
   private final Set<FactPartition> partsQueried = Sets.newHashSet();
 
   private CubeInterface baseTable;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode selectAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode whereAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode groupByAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode havingAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode joinAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode orderByAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private Integer limitValue;
   private List<TimeRangeNode> timenodes = Lists.newArrayList();
   private final List<Integer> selectIndices = Lists.newArrayList();
@@ -103,6 +111,40 @@ public class CandidateFact implements CandidateTable, 
QueryAST {
     return (!timeRange.getFromDate().before(fact.getStartTime())) && 
(!timeRange.getToDate().after(fact.getEndTime()));
   }
 
+  public void addToHaving(ASTNode ast) {
+    if (getHavingAST() == null) {
+      setHavingAST(new ASTNode(new CommonToken(TOK_HAVING, "TOK_HAVING")));
+      getHavingAST().addChild(ast);
+      return;
+    }
+    ASTNode existingHavingAST = (ASTNode) getHavingAST().getChild(0);
+    ASTNode newHavingAST = new ASTNode(new CommonToken(KW_AND, "KW_AND"));
+    newHavingAST.addChild(ast);
+    newHavingAST.addChild(existingHavingAST);
+    getHavingAST().setChild(0, newHavingAST);
+  }
+
+  public String addAndGetAliasFromSelect(ASTNode ast, AliasDecider 
aliasDecider) {
+    for (Node n : getSelectAST().getChildren()) {
+      ASTNode astNode = (ASTNode) n;
+      if (HQLParser.equalsAST(ast, (ASTNode) astNode.getChild(0))) {
+        if (astNode.getChildCount() > 1) {
+          return astNode.getChild(1).getText();
+        }
+        String alias = aliasDecider.decideAlias(astNode);
+        astNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
+        return alias;
+      }
+    }
+    // Not found, have to add to select
+    String alias = aliasDecider.decideAlias(ast);
+    ASTNode selectExprNode = new ASTNode(new CommonToken(TOK_SELEXPR));
+    selectExprNode.addChild(ast);
+    selectExprNode.addChild(new ASTNode(new CommonToken(Identifier, alias)));
+    getSelectAST().addChild(selectExprNode);
+    return alias;
+  }
+
   static class TimeRangeNode {
     ASTNode timenode;
     ASTNode parent;
@@ -129,15 +171,17 @@ public class CandidateFact implements CandidateTable, 
QueryAST {
     if (cubeql.getGroupByAST() != null) {
       setGroupByAST(HQLParser.copyAST(cubeql.getGroupByAST()));
     }
-    if (cubeql.getHavingAST() != null) {
-      setHavingAST(HQLParser.copyAST(cubeql.getHavingAST()));
-    }
   }
 
+
   public String getWhereClause(String storageTable) {
     return getStorgeWhereClauseMap().get(storageTable);
   }
 
+  public boolean isExpressionAnswerable(ASTNode node, CubeQueryContext 
context) throws LensException {
+    return getColumns().containsAll(getColsInExpr(context, 
context.getCube().getAllFieldNames(), node));
+  }
+
   /**
    * Update the ASTs to include only the fields queried from this fact, in all 
the expressions
    *
@@ -179,11 +223,10 @@ public class CandidateFact implements CandidateTable, 
QueryAST {
       currentChild++;
     }
 
-    // update whereAST to include only filters of this fact
-    // TODO
+    // don't need to update where ast, since where is only on dim attributes 
and dim attributes
+    // are assumed to be common in multi fact queries.
 
-    // update havingAST to include only filters of this fact
-    // TODO
+    // push down of having clauses happens just after this call in 
cubequerycontext
   }
 
   private Set<String> getColsInExpr(final CubeQueryContext cubeql, final 
Set<String> cubeCols,
@@ -304,7 +347,6 @@ public class CandidateFact implements CandidateTable, 
QueryAST {
   }
 
 
-
   /**
    * @return the selectIndices
    */

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 79dd88c..ebf8875 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -25,6 +25,8 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import static com.google.common.base.Preconditions.checkArgument;
 
+
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.*;
@@ -132,9 +134,11 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
   protected final Map<Dimension, Set<CandidateDim>> candidateDims = new 
HashMap<Dimension, Set<CandidateDim>>();
 
   // query trees
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode havingAST;
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode selectAST;
 
   // Will be set after the Fact is picked and time ranges replaced
@@ -142,7 +146,8 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
   @Setter
   private ASTNode whereAST;
 
-  @Getter @Setter
+  @Getter
+  @Setter
   private ASTNode orderByAST;
   // Setter is used in promoting the select when promotion is on.
   @Getter
@@ -352,7 +357,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
 
   // map of ref column in query to set of Dimension that have the column - 
which are added as optional dims
   @Getter
-  private Map<String, Set<Aliased<Dimension>>>  refColToDim = 
Maps.newHashMap();
+  private Map<String, Set<Aliased<Dimension>>> refColToDim = Maps.newHashMap();
 
   public void updateRefColDim(String col, Aliased<Dimension> dim) {
     Set<Aliased<Dimension>> refDims = refColToDim.get(col.toLowerCase());
@@ -369,10 +374,11 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
     private String exprCol;
     private String alias;
   }
+
   // map of expression column in query to set of Dimension that are accessed 
in the expression column - which are added
   // as optional dims
   @Getter
-  private Map<QueriedExprColumn, Set<Aliased<Dimension>>>  exprColToDim = 
Maps.newHashMap();
+  private Map<QueriedExprColumn, Set<Aliased<Dimension>>> exprColToDim = 
Maps.newHashMap();
 
   public void updateExprColDim(String tblAlias, String col, Aliased<Dimension> 
dim) {
 
@@ -400,7 +406,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
   }
 
   public void addOptionalJoinDimTable(String alias, boolean isRequired) throws 
LensException {
-    addOptionalDimTable(alias, null, isRequired, null, false, (String[])null);
+    addOptionalDimTable(alias, null, isRequired, null, false, (String[]) null);
   }
 
   public void addOptionalExprDimTable(String dimAlias, String queriedExpr, 
String srcTableAlias,
@@ -665,6 +671,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
   public Integer getLimitValue() {
     return qb.getParseInfo().getDestLimit(getClause());
   }
+
   public void setLimitValue(Integer value) {
     qb.getParseInfo().setDestLimit(getClause(), value);
   }
@@ -809,7 +816,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
             }
           }
           log.error("Query rewrite failed due to NO_CANDIDATE_DIM_AVAILABLE, 
Cause {}",
-                  dimPruningMsgs.get(dim).toJsonObject());
+            dimPruningMsgs.get(dim).toJsonObject());
           throw new NoCandidateDimAvailableException(dimPruningMsgs.get(dim));
         }
       }
@@ -852,8 +859,10 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
   }
 
   private HQLContextInterface hqlContext;
-  @Getter private Collection<CandidateFact> pickedFacts;
-  @Getter private Collection<CandidateDim> pickedDimTables;
+  @Getter
+  private Collection<CandidateFact> pickedFacts;
+  @Getter
+  private Collection<CandidateDim> pickedDimTables;
 
   private void addRangeClauses(CandidateFact fact) throws LensException {
     if (fact != null) {
@@ -881,7 +890,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
       autoJoinCtx.pruneAllPaths(cube, cfacts, dimsToQuery);
     }
 
-    Map<CandidateFact, Set<Dimension>> factDimMap = new HashMap<CandidateFact, 
Set<Dimension>>();
+    Map<CandidateFact, Set<Dimension>> factDimMap = new HashMap<>();
     if (cfacts != null) {
       if (cfacts.size() > 1) {
         // copy ASTs for each fact
@@ -890,8 +899,6 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
           factDimMap.put(cfact, new HashSet<>(dimsToQuery.keySet()));
         }
       }
-    }
-    if (cfacts != null) {
       for (CandidateFact fact : cfacts) {
         addRangeClauses(fact);
       }
@@ -907,6 +914,9 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
           factDimMap.get(cfact).addAll(factExprDimTables);
         }
       }
+      if (cfacts.size() > 1) {
+        havingAST = MultiFactHQLContext.pushDownHaving(havingAST, this, 
cfacts);
+      }
     } else {
       // dim only query
       exprDimensions.addAll(exprCtx.rewriteExprCtx(null, dimsToQuery, this));
@@ -957,6 +967,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
         for (CandidateFact cfact : cfacts) {
           cfact.updateASTs(this);
         }
+        whereAST = MultiFactHQLContext.convertHavingToWhere(havingAST, this, 
cfacts, new DefaultAliasDecider());
       }
     }
     hqlContext = createHQLContext(cfacts, dimsToQuery, factDimMap);
@@ -1069,6 +1080,7 @@ public class CubeQueryContext implements 
TrackQueriedColumns, QueryAST {
 
     return isCubeMeasure(msrname);
   }
+
   public boolean isAggregateExpr(String expr) {
     return aggregateExprs.contains(expr == null ? null : expr.toLowerCase());
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
new file mode 100644
index 0000000..dadbfa0
--- /dev/null
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultAliasDecider.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+
+
+public class DefaultAliasDecider implements AliasDecider {
+  int counter = 0;
+  private static final String ALIAS_PREFIX = "alias";
+
+  public String decideAlias(ASTNode node) {
+    return ALIAS_PREFIX + (counter++);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 5ff265d..fa81831 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -465,7 +465,9 @@ class ExpressionResolver implements ContextRewriter {
       replaceAST(cubeql, queryAST.getWhereAST());
       replaceAST(cubeql, queryAST.getJoinAST());
       replaceAST(cubeql, queryAST.getGroupByAST());
-      replaceAST(cubeql, queryAST.getHavingAST());
+      // Having AST is resolved by each fact, so that all facts can expand 
their expressions.
+      // Having ast is not copied now, it's maintained in cubeql, each fact 
processes that serially.
+      replaceAST(cubeql, cubeql.getHavingAST());
       replaceAST(cubeql, cubeql.getOrderByAST());
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index c9aff5d..fdef3f1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -41,6 +41,8 @@ import org.antlr.runtime.tree.Tree;
 
 import com.google.common.base.Optional;
 
+import com.google.common.collect.Sets;
+import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
 
@@ -81,6 +83,7 @@ public final class HQLParser {
   }
 
   public static final Set<Integer> BINARY_OPERATORS;
+  public static final Set<Integer> FILTER_OPERATORS;
   public static final Set<Integer> ARITHMETIC_OPERATORS;
   public static final Set<Integer> UNARY_OPERATORS;
   public static final Set<Integer> PRIMITIVE_TYPES;
@@ -140,6 +143,9 @@ public final class HQLParser {
     primitiveTypes.add(TOK_VARCHAR);
     primitiveTypes.add(TOK_CHAR);
     PRIMITIVE_TYPES = Collections.unmodifiableSet(primitiveTypes);
+
+    FILTER_OPERATORS = Sets.newHashSet(KW_IN, GREATERTHAN, 
GREATERTHANOREQUALTO, LESSTHAN, LESSTHANOREQUALTO, EQUAL,
+      EQUAL_NS);
   }
 
   public static boolean isArithmeticOp(int tokenType) {
@@ -840,4 +846,39 @@ public final class HQLParser {
     }
     return node;
   }
+  @Data
+  public static class HashableASTNode {
+    private ASTNode ast;
+    private int hashCode = -1;
+    private boolean hashCodeComputed = false;
+
+    public HashableASTNode(ASTNode ast) {
+      this.ast = ast;
+    }
+
+    public void setAST(ASTNode ast) {
+      this.ast = ast;
+      hashCodeComputed = false;
+    }
+
+    public ASTNode getAST() {
+      return ast;
+    }
+
+    @Override
+    public int hashCode() {
+      if (!hashCodeComputed) {
+        hashCode = getString(ast).hashCode();
+        hashCodeComputed = true;
+      }
+      return hashCode;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() 
&& getString(this.getAST())
+        .trim().equalsIgnoreCase(getString(((HashableASTNode) 
o).getAST()).trim());
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
index 1a729f8..9c18b7e 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
@@ -18,20 +18,28 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.HQLParser.*;
+
 import java.util.*;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
 
+import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+
+import org.antlr.runtime.CommonToken;
 
 import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * Writes a join query with all the facts involved, with where, groupby and 
having expressions pushed down to the fact
  * queries.
  */
+@Slf4j
 class MultiFactHQLContext extends SimpleHQLContext {
 
   private Set<CandidateFact> facts;
@@ -76,7 +84,7 @@ class MultiFactHQLContext extends SimpleHQLContext {
   }
 
   private String getWhereString() {
-    return null;
+    return query.getWhereTree();
   }
 
   public String toHQL() throws LensException {
@@ -150,4 +158,79 @@ class MultiFactHQLContext extends SimpleHQLContext {
     }
     return fromBuilder.toString();
   }
+
+
+  public static ASTNode convertHavingToWhere(ASTNode havingAST, 
CubeQueryContext context, Set<CandidateFact> cfacts,
+    AliasDecider aliasDecider) throws LensException {
+    if (havingAST == null) {
+      return null;
+    }
+    if (isAggregateAST(havingAST) || isTableColumnAST(havingAST) || 
isNonAggregateFunctionAST(havingAST)) {
+      // if already present in select, pick alias
+      String alias = null;
+      for (CandidateFact fact : cfacts) {
+        if (fact.isExpressionAnswerable(havingAST, context)) {
+          alias = fact.addAndGetAliasFromSelect(havingAST, aliasDecider);
+          return new ASTNode(new CommonToken(HiveParser.Identifier, alias));
+        }
+      }
+    }
+    if (havingAST.getChildren() != null) {
+      for (int i = 0; i < havingAST.getChildCount(); i++) {
+        ASTNode replaced = convertHavingToWhere((ASTNode) 
havingAST.getChild(i), context, cfacts, aliasDecider);
+        havingAST.setChild(i, replaced);
+      }
+    }
+    return havingAST;
+  }
+
+  public static ASTNode pushDownHaving(ASTNode ast, CubeQueryContext 
cubeQueryContext, Set<CandidateFact> cfacts)
+    throws LensException {
+    if (ast == null) {
+      return null;
+    }
+    if (ast.getType() == HiveParser.KW_AND || ast.getType() == 
HiveParser.TOK_HAVING) {
+      List<ASTNode> children = Lists.newArrayList();
+      for (Node child : ast.getChildren()) {
+        ASTNode newChild = pushDownHaving((ASTNode) child, cubeQueryContext, 
cfacts);
+        if (newChild != null) {
+          children.add(newChild);
+        }
+      }
+      if (children.size() == 0) {
+        return null;
+      } else if (children.size() == 1) {
+        return children.get(0);
+      } else {
+        ASTNode newASTNode = new ASTNode(ast.getToken());
+        for (ASTNode child : children) {
+          newASTNode.addChild(child);
+        }
+        return newASTNode;
+      }
+    }
+    if (isPrimitiveBooleanExpression(ast)) {
+      CandidateFact fact = pickFactToPushDown(ast, cubeQueryContext, cfacts);
+      if (fact == null) {
+        return ast;
+      }
+      fact.addToHaving(ast);
+      return null;
+    }
+    return ast;
+  }
+
+  private static CandidateFact pickFactToPushDown(ASTNode ast, 
CubeQueryContext cubeQueryContext, Set<CandidateFact>
+    cfacts) throws LensException {
+    for (CandidateFact fact : cfacts) {
+      if (fact.isExpressionAnswerable(ast, cubeQueryContext)) {
+        return fact;
+      }
+    }
+    return null;
+  }
+
+  private static boolean isPrimitiveBooleanExpression(ASTNode ast) {
+    return HQLParser.FILTER_OPERATORS.contains(ast.getType());
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
index 7e3a0bf..9f16c5a 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
@@ -38,52 +38,13 @@ import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 
 import org.antlr.runtime.CommonToken;
-import org.antlr.runtime.tree.Tree;
-
-import lombok.Data;
-
 
 public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
 
   private final QueryAST ast;
-  int aliasCounter = 0;
-
-  @Data
-  public static class HashableASTNode {
-    private ASTNode ast;
-    private int hashCode = -1;
-    private boolean hashCodeComputed = false;
-
-    public HashableASTNode(ASTNode ast) {
-      this.ast = ast;
-    }
-
-    public void setAST(ASTNode ast) {
-      this.ast = ast;
-      hashCodeComputed = false;
-    }
-
-    public ASTNode getAST() {
-      return ast;
-    }
-
-    @Override
-    public int hashCode() {
-      if (!hashCodeComputed) {
-        hashCode = getString(ast).hashCode();
-        hashCodeComputed = true;
-      }
-      return hashCode;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() 
&& getString(this.getAST())
-        .trim().equalsIgnoreCase(getString(((HashableASTNode) 
o).getAST()).trim());
-    }
-  }
 
   private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
+  private AliasDecider aliasDecider = new DefaultAliasDecider();
 
   SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, 
CandidateDim> dimsToQuery,
     CubeQueryContext query, QueryAST ast)
@@ -174,7 +135,7 @@ public class SingleFactMultiStorageHQLContext extends 
UnionHQLContext {
       ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new 
CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = decideAlias(astNode);
+      String alias = aliasDecider.decideAlias(astNode);
       ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
       innerSelectExprAST.addChild(aliasNode);
       addToInnerSelectAST(innerSelectExprAST);
@@ -192,7 +153,7 @@ public class SingleFactMultiStorageHQLContext extends 
UnionHQLContext {
       ASTNode innerSelectASTWithoutAlias = copyAST(astNode);
       ASTNode innerSelectExprAST = new ASTNode(new 
CommonToken(HiveParser.TOK_SELEXPR));
       innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = decideAlias(astNode);
+      String alias = aliasDecider.decideAlias(astNode);
       ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
       innerSelectExprAST.addChild(aliasNode);
       addToInnerSelectAST(innerSelectExprAST);
@@ -249,11 +210,6 @@ public class SingleFactMultiStorageHQLContext extends 
UnionHQLContext {
     return child;
   }
 
-  private String decideAlias(Tree child) {
-    // Can add intelligence in aliases someday. Not required though :)
-    return "alias" + (aliasCounter++);
-  }
-
   private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact 
fact, Map<Dimension, CandidateDim>
     dimsToQuery, CubeQueryContext query, QueryAST ast)
     throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/2f0e5fdb/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 57a15e2..8aab777 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -23,6 +23,7 @@ import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.DateUtil.*;
 import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.MISSING_PARTITIONS;
 import static org.apache.lens.cube.parse.CubeTestSetup.*;
+import static org.apache.lens.cube.parse.TestCubeRewriter.compareContains;
 import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
@@ -188,8 +189,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
         null, getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(lower.startsWith("select mq2.roundedmsr2 roundedmsr2, mq1.msr12 
msr12 from ")
       || lower.startsWith("select mq1.roundedmsr2 roundedmsr2, mq2.msr12 msr12 
from "), hqlQuery);
@@ -206,8 +207,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, 
"C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, 
mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
@@ -229,8 +230,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, 
"C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, 
mq2.roundedmsr2 roundedmsr2, mq1.msr12 msr12 from ")
@@ -254,8 +255,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
             "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
             " group by basecube.dim1", 
getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
             lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, 
mq2.roundedmsr2 roundedmsr2, "
@@ -278,8 +279,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, 
"C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     String lower = hqlQuery.toLowerCase();
     assertTrue(
       lower.startsWith("select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.msr12 
msr12, mq1.roundedmsr2 roundedmsr2 from ")
@@ -305,9 +306,9 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected3 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `dim1`, 
max(basecube.msr13) as `msr13` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, 
"c1_testfact3_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
-    TestCubeRewriter.compareContains(expected3, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    compareContains(expected3, hqlQuery);
     assertTrue(
       hqlQuery.toLowerCase().startsWith(
         "select coalesce(mq1.dim1, mq2.dim1, mq3.dim1) dim1, mq1.msr12 msr12,"
@@ -342,8 +343,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         cubeName,
         "select basecube.dim1 as `dim1`, basecube.dim11 as `dim11`, 
round(sum(basecube.msr2)/1000) as `roundedmsr2` "
         + "FROM ", null, " group by basecube.dim1", 
getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, coalesce(mq1.dim11, 
mq2.dim11) dim11,"
         + " mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from ")
@@ -365,8 +366,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim1 as `dim1`, round(basecube.msr2/1000) as 
`roundedmsr2` FROM ", null, null,
         getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, mq1.msr11 msr11, 
mq2.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -387,8 +388,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `d1`, 
round(sum(basecube.msr2)/1000) as `m2` FROM ", null,
         " group by basecube.dim1", getWhereForDailyAndHourly2days(cubeName, 
"C1_testFact1_BASE"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.d1, mq2.d1) d1, mq2.expr2 `my msr12`, mq1.m2 m2 
from ")
       ||
@@ -407,8 +408,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 =
       getExpectedQuery(cubeName, "select basecube.dim1 as `dim1`, 
avg(basecube.msr2)) as `msr2` FROM ", null,
         " group by basecube.dim1", getWhereForHourly2days(cubeName, 
"C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim1, mq2.dim1) dim1, mq2.msr12 msr12, mq1.msr2 
msr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -430,8 +431,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
         "select dim2chain.name as `name`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", " JOIN " + getDbName()
             + "c1_testdim2tbl dim2chain ON basecube.dim2 = " + " dim2chain.id 
and (dim2chain.dt = 'latest') ", null,
         " group by dim2chain.name", null, getWhereForHourly2days(cubeName, 
"C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.name, mq2.name) name, mq2.msr12 msr12, 
mq1.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -451,8 +452,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     String expected2 = getExpectedQuery(cubeName,
         "select basecube.dim2 as `dim2`, round(sum(basecube.msr2)/1000) as 
`roundedmsr2` FROM ", null,
         " group by basecube.dim2", getWhereForHourly2days(cubeName, 
"C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith(
       "select coalesce(mq1.dim2, mq2.dim2) dim2, mq2.msr13 msr13, 
mq1.roundedmsr2 roundedmsr2 from ")
       || hqlQuery.toLowerCase().startsWith(
@@ -480,8 +481,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
           + " round(sum(basecube.msr2)/1000) as `msr2` FROM ", null,
         " group by basecube.dim1 != 'x' AND basecube.dim2 != 10",
         getWhereForHourly2days(cubeName, "C1_testfact1_raw_base"));
-    TestCubeRewriter.compareContains(expected1, hqlQuery);
-    TestCubeRewriter.compareContains(expected2, hqlQuery);
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase()
       .startsWith("select coalesce(mq1.booleancut, mq2.booleancut) booleancut, 
mq2.msr2 msr2,"
         + " mq1.expr3 expr3 from ")
@@ -570,4 +571,198 @@ public class TestBaseCubeQueries extends TestQueryRewrite 
{
       }
     }
   }
+  @Test
+  public void testMultiFactQueryWithHaving() throws Exception {
+
+    String hqlQuery, expected1, expected2;
+    String endSubString = "mq2 on mq1.dim1 <=> mq2.dim1 AND mq1.dim11 <=> 
mq2.dim11";
+    String joinSubString = "mq1 full outer join ";
+
+    // only One having clause, that too answerable from one fact
+    hqlQuery = rewrite("select dim1, dim11, msr12 from basecube where " + 
TWO_DAYS_RANGE
+      + "having roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
sum(basecube.msr12) as msr12 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having 
round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertTrue(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString), hqlQuery);
+
+    // Two having clause, one from each fact.
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube 
where " + TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
sum(basecube.msr12) as msr12 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING 
sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING 
round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq1.msr12 msr12, 
mq2.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+        + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, 
mq1.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString), hqlQuery);
+
+    // Two having clauses and one complex expression in having which needs to 
be split over the two facts
+    // And added as where clause outside
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube 
where " + TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000 and msr12 > 2 and roundedmsr2 
> 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
sum(basecube.msr12) as msr12 , "
+        + "floor(sum(basecube.msr12)) as alias0 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING 
sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, 
mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+        + "coalesce(mq1.dim11, mq2.dim11) dim11, mq1.msr12 msr12, 
mq2.roundedmsr2 roundedmsr2 from "), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) 
<=  1000 )"), hqlQuery);
+
+    // No push-down-able having clauses.
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube 
where " + TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
sum(basecube.msr12) as msr12, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, 
mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), 
hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  roundedmsr2 ) 
<=  1000 )"), hqlQuery);
+
+    // function over expression of two functions over measures
+    hqlQuery = rewrite("select dim1, dim11, msr12, roundedmsr2 from basecube 
where " + TWO_DAYS_RANGE
+      + "having round(flooredmsr12+roundedmsr2) <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
sum(basecube.msr12) as msr12, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2)/1000) as roundedmsr2 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, "
+      + "coalesce(mq1.dim11, mq2.dim11) dim11, mq2.msr12 msr12, 
mq1.roundedmsr2 roundedmsr2 from ")
+      || hqlQuery.toLowerCase().startsWith("select coalesce(mq1.dim1, 
mq2.dim1) dim1, coalesce(mq1.dim11, mq2.dim11) "
+        + "dim11, mq1.msr12 msr12, mq2.roundedmsr2 roundedmsr2 from "), 
hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (round(( alias0  +  
roundedmsr2 )) <=  1000 )"), hqlQuery);
+
+
+    // Following test cases only select dimensions, and all the measures are 
in having.
+    // Mostly tests follow the same pattern as the above tests,
+    // The extra thing to test is the inclusion of sub-expressions in select 
clauses.
+
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING 
sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 HAVING 
round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+    String begin = "select coalesce(mq1.dim1, mq2.dim1) dim1, 
coalesce(mq1.dim11, mq2.dim11) dim11 from ";
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString) && 
hqlQuery.endsWith(endSubString), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
+      + "having flooredmsr12+roundedmsr2 <= 1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "floor(sum(basecube.msr12)) as alias0 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2/1000)) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  
1000 )"), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
+      + "having msr12 > 2 and roundedmsr2 > 0 and flooredmsr12+roundedmsr2 <= 
1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "floor(sum(( basecube . msr12 ))) as `alias0` FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having 
sum(basecube.msr12) > 2",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2)/1000) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11 having 
round(sum(basecube.msr2)/1000) > 0",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + " WHERE (( alias0  +  alias1 ) <=  
1000 )"), hqlQuery);
+
+    hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
+      + "having msr12 > 2 or roundedmsr2 > 0 or flooredmsr12+roundedmsr2 <= 
1000", conf);
+    expected1 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, "
+        + "sum(basecube.msr12) as alias0, floor(sum(basecube.msr12)) as alias2 
FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact2_BASE"));
+    expected2 = getExpectedQuery(cubeName,
+      "select basecube.dim1 as dim1, basecube.dim11 as dim11, 
round(sum(basecube.msr2)/1000) as alias1 FROM ",
+      null, " group by basecube.dim1, basecube.dim11",
+      getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
+    String havingToWhere = " WHERE ((( alias0  >  2 ) or ( alias1  >  0 )) or 
(( alias2  +  alias1 ) <=  1000 ))";
+
+    assertFalse(hqlQuery.toLowerCase().contains("having"));
+    compareContains(expected1, hqlQuery);
+    compareContains(expected2, hqlQuery);
+    assertTrue(hqlQuery.toLowerCase().startsWith(begin), hqlQuery);
+    assertTrue(hqlQuery.contains(joinSubString)
+      && hqlQuery.endsWith(endSubString + havingToWhere), hqlQuery);
+  }
 }

Reply via email to