http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
index 7f07dbc..757a877 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
@@ -18,13 +18,27 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.*;
+import static java.util.Comparator.comparing;
+
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
+import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
+import lombok.Getter;
 
 /**
  * Represents a union of two candidates
@@ -34,20 +48,21 @@ public class UnionCandidate implements Candidate {
   /**
    * Caching start and end time calculated for this candidate as it may have 
many child candidates.
    */
-  Date startTime = null;
-  Date endTime = null;
-  String toStr;
-  CubeQueryContext cubeql;
+  private Date startTime = null;
+  private Date endTime = null;
+  private String toStr;
+  @Getter
+  CubeQueryContext cubeQueryContext;
   /**
    * List of child candidates that will be union-ed
    */
-  private List<Candidate> childCandidates;
-  private QueryAST queryAst;
+  @Getter
+  private List<Candidate> children;
+
   private Map<TimeRange, Map<Candidate, TimeRange>> splitTimeRangeMap = 
Maps.newHashMap();
-  public UnionCandidate(List<Candidate> childCandidates, CubeQueryContext 
cubeql) {
-    this.childCandidates = childCandidates;
-    //this.alias = alias;
-    this.cubeql = cubeql;
+  UnionCandidate(Collection<? extends Candidate> childCandidates, 
CubeQueryContext cubeQueryContext) {
+    this.children = Lists.newArrayList(childCandidates);
+    this.cubeQueryContext = cubeQueryContext;
   }
 
   @Override
@@ -57,6 +72,15 @@ public class UnionCandidate implements Candidate {
   }
 
   @Override
+  public boolean isPhraseAnswerable(QueriedPhraseContext phrase) throws 
LensException {
+    for (Candidate cand : getChildren()) {
+      if (!cand.isPhraseAnswerable(phrase)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
   public boolean isTimeRangeCoverable(TimeRange timeRange) throws 
LensException {
     Map<Candidate, TimeRange> candidateRange = getTimeRangeSplit(timeRange);
     for (Map.Entry<Candidate, TimeRange> entry : candidateRange.entrySet()) {
@@ -68,25 +92,52 @@ public class UnionCandidate implements Candidate {
   }
 
   @Override
+  public void addAnswerableMeasurePhraseIndices(int index) {
+    for (Candidate candidate : getChildren()) {
+      candidate.addAnswerableMeasurePhraseIndices(index);
+    }
+  }
+
+  @Override
+  public boolean isColumnValidForRange(String column) {
+    return getChildren().stream().anyMatch(candidate -> 
candidate.isColumnValidForRange(column));
+  }
+
+  @Override
+  public Optional<Date> getColumnStartTime(String column) {
+    return getChildren().stream()
+      .map(x->x.getColumnStartTime(column))
+      .filter(Optional::isPresent)
+      .map(Optional::get)
+      .min(Comparator.naturalOrder());
+  }
+
+  @Override
+  public Optional<Date> getColumnEndTime(String column) {
+    return getChildren().stream()
+      .map(x->x.getColumnEndTime(column))
+      .filter(Optional::isPresent)
+      .map(Optional::get)
+      .max(Comparator.naturalOrder());
+  }
+
+
+  @Override
   public Collection<String> getColumns() {
     // In UnionCandidate all columns are same, return the columns
     // of first child
-    return childCandidates.iterator().next().getColumns();
+    return children.iterator().next().getColumns();
   }
 
   @Override
   public Date getStartTime() {
     //Note: concurrent calls not handled specifically (This should not be a 
problem even if we do
     //get concurrent calls).
-
     if (startTime == null) {
-      Date minStartTime = childCandidates.get(0).getStartTime();
-      for (Candidate child : childCandidates) {
-        if (child.getStartTime().before(minStartTime)) {
-          minStartTime = child.getStartTime();
-        }
-      }
-      startTime = minStartTime;
+      startTime = children.stream()
+        .map(Candidate::getStartTime)
+        .min(Comparator.naturalOrder())
+        .orElseGet(() -> new Date(Long.MIN_VALUE)); // this should be 
redundant.
     }
     return startTime;
   }
@@ -94,13 +145,10 @@ public class UnionCandidate implements Candidate {
   @Override
   public Date getEndTime() {
     if (endTime == null) {
-      Date maxEndTime = childCandidates.get(0).getEndTime();
-      for (Candidate child : childCandidates) {
-        if (child.getEndTime().after(maxEndTime)) {
-          maxEndTime = child.getEndTime();
-        }
-      }
-      endTime = maxEndTime;
+      endTime = children.stream()
+        .map(Candidate::getEndTime)
+        .max(Comparator.naturalOrder())
+        .orElseGet(() -> new Date(Long.MAX_VALUE)); // this should be redundant
     }
     return endTime;
   }
@@ -108,7 +156,7 @@ public class UnionCandidate implements Candidate {
   @Override
   public double getCost() {
     double cost = 0.0;
-    for (TimeRange timeRange : cubeql.getTimeRanges()) {
+    for (TimeRange timeRange : getCubeQueryContext().getTimeRanges()) {
       for (Map.Entry<Candidate, TimeRange> entry : 
getTimeRangeSplit(timeRange).entrySet()) {
         cost += entry.getKey().getCost() * entry.getValue().milliseconds() / 
timeRange.milliseconds();
       }
@@ -121,7 +169,7 @@ public class UnionCandidate implements Candidate {
     if (this.equals(candidate)) {
       return true;
     }
-    for (Candidate child : childCandidates) {
+    for (Candidate child : children) {
       if (child.contains((candidate))) {
         return true;
       }
@@ -129,11 +177,6 @@ public class UnionCandidate implements Candidate {
     return false;
   }
 
-  @Override
-  public Collection<Candidate> getChildren() {
-    return childCandidates;
-  }
-
   /**
    * @param timeRange
    * @return
@@ -152,7 +195,7 @@ public class UnionCandidate implements Candidate {
   @Override
   public Set<FactPartition> getParticipatingPartitions() {
     Set<FactPartition> factPartitionSet = new HashSet<>();
-    for (Candidate c : childCandidates) {
+    for (Candidate c : children) {
       factPartitionSet.addAll(c.getParticipatingPartitions());
     }
     return factPartitionSet;
@@ -160,13 +203,30 @@ public class UnionCandidate implements Candidate {
 
   @Override
   public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext 
expr) {
-    for (Candidate cand : childCandidates) {
-      if (!cand.isExpressionEvaluable(expr)) {
+    return children.stream().allMatch(cand-> cand.isExpressionEvaluable(expr));
+  }
+
+  @Override
+  public boolean isExpressionEvaluable(String expr) {
+    return children.stream().allMatch(cand->cand.isExpressionEvaluable(expr));
+  }
+
+  @Override
+  public boolean isDimAttributeEvaluable(String dim) throws LensException {
+    for (Candidate childCandidate : children) {
+      if (!childCandidate.isDimAttributeEvaluable(dim)) {
         return false;
       }
     }
     return true;
   }
+  public UnionCandidate explode() throws LensException {
+    ListIterator<Candidate> i = children.listIterator();
+    while(i.hasNext()) {
+      i.set(i.next().explode());
+    }
+    return this;
+  }
 
   @Override
   public String toString() {
@@ -177,11 +237,11 @@ public class UnionCandidate implements Candidate {
   }
 
   private String getToString() {
-    StringBuilder builder = new StringBuilder(10 * childCandidates.size());
+    StringBuilder builder = new StringBuilder(10 * children.size());
     builder.append("UNION[");
-    for (Candidate candidate : childCandidates) {
+    for (Candidate candidate : children) {
       builder.append(candidate.toString());
-      builder.append(", ");
+      builder.append("; ");
     }
     builder.delete(builder.length() - 2, builder.length());
     builder.append("]");
@@ -196,13 +256,13 @@ public class UnionCandidate implements Candidate {
    * @return
    */
   private Map<Candidate, TimeRange> splitTimeRangeForChildren(TimeRange 
timeRange) {
-    childCandidates.sort(Comparator.comparing(Candidate::getCost));
+    children.sort(comparing(Candidate::getCost));
     Map<Candidate, TimeRange> childrenTimeRangeMap = new HashMap<>();
     // Sorted list based on the weights.
     Set<TimeRange> ranges = new HashSet<>();
     ranges.add(timeRange);
-    for (Candidate c : childCandidates) {
-      TimeRange.TimeRangeBuilder builder = getClonedBuiler(timeRange);
+    for (Candidate c : children) {
+      TimeRange.TimeRangeBuilder builder = timeRange.cloneAsBuilder();
       TimeRange tr = resolveTimeRangeForChildren(c, ranges, builder);
       if (tr != null) {
         // If the time range is not null it means this child candidate is 
valid for this union candidate.
@@ -262,9 +322,9 @@ public class UnionCandidate implements Candidate {
       if (ret.getFromDate().getTime() == range.getFromDate().getTime()) {
         checkAndUpdateNewTimeRanges(ret, range, newTimeRanges);
       } else {
-        TimeRange.TimeRangeBuilder b1 = getClonedBuiler(ret);
-        b1.fromDate(range.getFromDate());
-        b1.toDate(ret.getFromDate());
+        TimeRange.TimeRangeBuilder b1 = ret.cloneAsBuilder()
+          .fromDate(range.getFromDate())
+          .toDate(ret.getFromDate());
         newTimeRanges.add(b1.build());
         checkAndUpdateNewTimeRanges(ret, range, newTimeRanges);
 
@@ -277,19 +337,10 @@ public class UnionCandidate implements Candidate {
 
   private void checkAndUpdateNewTimeRanges(TimeRange ret, TimeRange range, 
Set<TimeRange> newTimeRanges) {
     if (ret.getToDate().getTime() < range.getToDate().getTime()) {
-      TimeRange.TimeRangeBuilder b2 = getClonedBuiler(ret);
+      TimeRange.TimeRangeBuilder b2 = ret.cloneAsBuilder();
       b2.fromDate(ret.getToDate());
       b2.toDate(range.getToDate());
       newTimeRanges.add(b2.build());
     }
   }
-
-  private TimeRange.TimeRangeBuilder getClonedBuiler(TimeRange timeRange) {
-    TimeRange.TimeRangeBuilder builder = new TimeRange.TimeRangeBuilder();
-    builder.astNode(timeRange.getAstNode());
-    builder.childIndex(timeRange.getChildIndex());
-    builder.parent(timeRange.getParent());
-    builder.partitionColumn(timeRange.getPartitionColumn());
-    return builder;
-  }
 }
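
The rewritten getStartTime/getEndTime and the new getColumnStartTime/getColumnEndTime above all use one stream idiom: map each child to a Date (or Optional<Date>) and reduce with Comparator.naturalOrder(). Below is a minimal, self-contained sketch of that idiom; the Child type and its field names are hypothetical stand-ins for Candidate, not Lens classes.

    import java.util.Comparator;
    import java.util.Date;
    import java.util.List;
    import java.util.Optional;

    public class MinMaxDateSketch {
      // Hypothetical stand-in for Candidate: a start time that is always set,
      // and a per-column start time that may be absent.
      static class Child {
        final Date startTime;
        final Optional<Date> columnStartTime;
        Child(Date startTime, Optional<Date> columnStartTime) {
          this.startTime = startTime;
          this.columnStartTime = columnStartTime;
        }
      }

      // Earliest start time across children; the orElseGet branch only fires for
      // an empty child list, mirroring the "should be redundant" fallback above.
      static Date earliestStart(List<Child> children) {
        return children.stream()
          .map(c -> c.startTime)
          .min(Comparator.naturalOrder())
          .orElseGet(() -> new Date(Long.MIN_VALUE));
      }

      // Earliest declared column start time, skipping children without one.
      static Optional<Date> earliestColumnStart(List<Child> children) {
        return children.stream()
          .map(c -> c.columnStartTime)
          .filter(Optional::isPresent)
          .map(Optional::get)
          .min(Comparator.naturalOrder());
      }
    }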

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
index 267d85b..1b4cc10 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -19,20 +19,21 @@
 
 package org.apache.lens.cube.parse;
 
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toList;
+
 import static org.apache.lens.cube.parse.HQLParser.*;
 
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.util.StringUtils;
 
 import org.antlr.runtime.CommonToken;
 
@@ -43,40 +44,44 @@ import lombok.extern.slf4j.Slf4j;
  * this class rewrites union query for all the participating StorageCandidates.
  */
 @Slf4j
-public class UnionQueryWriter {
+public class UnionQueryWriter extends SimpleHQLContext {
 
-  private QueryAST queryAst;
   private Map<HQLParser.HashableASTNode, ASTNode> innerToOuterSelectASTs = new 
HashMap<>();
   private Map<HQLParser.HashableASTNode, ASTNode> innerToOuterHavingASTs = new 
HashMap<>();
   private Map<String, ASTNode> storageCandidateToSelectAstMap = new 
HashMap<>();
-  private AliasDecider aliasDecider = new DefaultAliasDecider();
   private CubeQueryContext cubeql;
-  Collection<StorageCandidate> storageCandidates;
-  public static final String DEFAULT_MEASURE = "0.0";
+  static final ASTNode DEFAULT_MEASURE_AST;
+  private static final String DEFAULT_MEASURE = "0.0";
+  static {
+    try {
+      DEFAULT_MEASURE_AST = HQLParser.parseExpr(DEFAULT_MEASURE);
+    } catch (LensException e) {
+      throw new RuntimeException("default measure not parsable");
+    }
+  }
+  Collection<StorageCandidateHQLContext> storageCandidates;
   public static final String DUPLICATE_EXPRESSION_PREFIX = "D";
 
-  public UnionQueryWriter(Collection<StorageCandidate> storageCandidates, 
CubeQueryContext cubeql) {
-    if (storageCandidates == null || storageCandidates.size()<=1) {
+  UnionQueryWriter(List<StorageCandidateHQLContext> storageCandidates, 
CubeQueryContext cubeql) throws LensException {
+    
super(DefaultQueryAST.fromStorageCandidate(storageCandidates.iterator().next()));
+    this.storageCandidates = storageCandidates;
+    if (storageCandidates.size() <= 1) {
       throw new IllegalArgumentException("There should be atleast two storage 
candidates to write a union query");
     }
     this.cubeql = cubeql;
-    this.storageCandidates = storageCandidates;
   }
 
-  public String toHQL(Map<StorageCandidate, Set<Dimension>> factDimMap) throws 
LensException {
-    StorageCandidate firstCandidate = storageCandidates.iterator().next();
+  @Override
+  protected void setMissingExpressions() throws LensException {
     // Set the default queryAST for the outer query
-    queryAst = DefaultQueryAST.fromStorageCandidate(firstCandidate,
-        firstCandidate.getQueryAst());
     updateAsts();
     updateInnterSelectASTWithDefault();
     processSelectAndHavingAST();
     processGroupByAST();
     processOrderByAST();
     CandidateUtil.updateFinalAlias(queryAst.getSelectAST(), cubeql);
-    return CandidateUtil.buildHQLString(queryAst.getSelectString(), 
getFromString(factDimMap), null,
-        queryAst.getGroupByString(), queryAst.getOrderByString(),
-        queryAst.getHavingString(), queryAst.getLimitValue());
+    setPrefix(cubeql.getInsertClause());
+    setFrom(getFromString());
   }
 
   /**
@@ -84,18 +89,12 @@ public class UnionQueryWriter {
    * being constructed from StorageCandidate.
    */
   private void updateAsts() {
-    for (StorageCandidate sc : storageCandidates) {
-      storageCandidateToSelectAstMap.put(sc.toString(),
+    for (StorageCandidateHQLContext sc : storageCandidates) {
+      storageCandidateToSelectAstMap.put(sc.getStorageCandidate().toString(),
           new ASTNode(new CommonToken(TOK_SELECT, "TOK_SELECT")));
-      if (sc.getQueryAst().getHavingAST() != null) {
-        sc.getQueryAst().setHavingAST(null);
-      }
-      if (sc.getQueryAst().getOrderByAST() != null) {
-        sc.getQueryAst().setOrderByAST(null);
-      }
-      if (sc.getQueryAst().getLimitValue() != null) {
-        sc.getQueryAst().setLimitValue(null);
-      }
+      sc.getQueryAst().setHavingAST(null);
+      sc.getQueryAst().setOrderByAST(null);
+      sc.getQueryAst().setLimitValue(null);
     }
   }
 
@@ -242,7 +241,7 @@ public class UnionQueryWriter {
    */
   private ASTNode getAggregateNodesExpression(int position) {
     ASTNode node = null;
-    for (StorageCandidate sc : storageCandidates) {
+    for (StorageCandidateHQLContext sc : storageCandidates) {
       node = (ASTNode) 
sc.getQueryAst().getSelectAST().getChild(position).getChild(0);
       if (HQLParser.isAggregateAST(node) || HQLParser.hasAggregate(node)) {
         return MetastoreUtil.copyAST(node);
@@ -257,13 +256,10 @@ public class UnionQueryWriter {
    * @param node
    * @return
    */
-  private boolean isNodeAnswerableForStorageCandidate(StorageCandidate sc, 
ASTNode node) {
+  private boolean isNodeNotAnswerableForStorageCandidate(StorageCandidate sc, 
ASTNode node) {
     Set<String> cols = new LinkedHashSet<>();
     getAllColumnsOfNode(node, cols);
-    if (!sc.getColumns().containsAll(cols)) {
-      return true;
-    }
-    return false;
+    return !sc.getColumns().containsAll(cols);
   }
 
   /**
@@ -275,7 +271,7 @@ public class UnionQueryWriter {
    */
   private ASTNode setDefaultValueInExprForAggregateNodes(ASTNode node, 
StorageCandidate sc) throws LensException {
     if (HQLParser.isAggregateAST(node)
-        && isNodeAnswerableForStorageCandidate(sc, node)) {
+        && isNodeNotAnswerableForStorageCandidate(sc, node)) {
       node.setChild(1, getSelectExpr(null, null, true));
     }
     for (int i = 0; i < node.getChildCount(); i++) {
@@ -286,11 +282,7 @@ public class UnionQueryWriter {
   }
 
   private boolean isAggregateFunctionUsedInAST(ASTNode node) {
-    if (HQLParser.isAggregateAST(node)
-        || HQLParser.hasAggregate(node)) {
-      return true;
-    }
-    return false;
+    return HQLParser.isAggregateAST(node) || HQLParser.hasAggregate(node);
   }
 
   private boolean isNodeDefault(ASTNode node) {
@@ -305,11 +297,9 @@ public class UnionQueryWriter {
   private List<ASTNode> getProjectedNonDefaultPhrases() {
     List<ASTNode> phrases = new ArrayList<>();
     for (int i = 0; i < 
storageCandidates.iterator().next().getQueryAst().getSelectAST().getChildCount();
 i++) {
-      for (StorageCandidate sc : storageCandidates) {
+      for (StorageCandidateHQLContext sc : storageCandidates) {
         ASTNode selectAST = sc.getQueryAst().getSelectAST();
-        if (isNodeDefault((ASTNode) selectAST.getChild(i))) {
-          continue;
-        } else {
+        if (!isNodeDefault((ASTNode) selectAST.getChild(i))) {
           phrases.add((ASTNode) selectAST.getChild(i));
           break;
         }
@@ -320,22 +310,13 @@ public class UnionQueryWriter {
 
   private void removeRedundantProjectedPhrases() {
     List<ASTNode> phrases = getProjectedNonDefaultPhrases();
-    List<String> phrasesWithoutAlias = new ArrayList<>();
-    // populate all phrases without alias
-    for (ASTNode node : phrases) {
-      phrasesWithoutAlias.add(HQLParser.getString((ASTNode) node.getChild(0)));
-    }
+    List<String> phrasesWithoutAlias = phrases.stream().map(node -> 
node.getChild(0))
+      .map(ASTNode.class::cast).map(HQLParser::getString).collect(toList());
     Map<String, List<Integer>> phraseCountMap = new HashMap<>();
     Map<String, List<String>> aliasMap = new HashMap<>();
     for (int i = 0; i < phrasesWithoutAlias.size(); i++) {
       String phrase = phrasesWithoutAlias.get(i);
-      if (phraseCountMap.containsKey(phrase)) {
-        phraseCountMap.get(phrase).add(i);
-      } else {
-        List<Integer> indices = new ArrayList<>();
-        indices.add(i);
-        phraseCountMap.put(phrase, indices);
-      }
+      phraseCountMap.computeIfAbsent(phrase, x->new ArrayList()).add(i);
     }
     for (List<Integer> values : phraseCountMap.values()) {
       if (values.size() > 1) {
@@ -354,16 +335,17 @@ public class UnionQueryWriter {
       if (phraseCountMap.get(col).size() > 1) {
         List<Integer> childenToDelete = phraseCountMap.get(col).
             subList(1, phraseCountMap.get(col).size());
+        int counter = 0;
         for (int i : childenToDelete) {
-          for (StorageCandidate sc : storageCandidates) {
-            sc.getQueryAst().getSelectAST().setChild(i,
-                new ASTNode(new CommonToken(HiveParser.Identifier, 
DUPLICATE_EXPRESSION_PREFIX)));
+          for (StorageCandidateHQLContext sc : storageCandidates) {
+            sc.getQueryAst().getSelectAST().deleteChild(i - counter);
           }
+          counter++;
         }
       }
     }
 
-    for (StorageCandidate sc : storageCandidates) {
+    for (StorageCandidateHQLContext sc : storageCandidates) {
       for (Node node : sc.getQueryAst().getSelectAST().getChildren()) {
         ASTNode selectNode = (ASTNode) node;
         if (selectNode.getToken().getType() == HiveParser.Identifier
@@ -379,10 +361,8 @@ public class UnionQueryWriter {
     }
   }
 
-  public void updateOuterASTDuplicateAliases(ASTNode node,
-      Map<String, List<String>> aliasMap) {
+  private void updateOuterASTDuplicateAliases(ASTNode node, Map<String, 
List<String>> aliasMap) {
     if (node.getToken().getType() == HiveParser.DOT) {
-      String table = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, 
Identifier).toString();
       String col = node.getChild(1).toString();
       for (Map.Entry<String, List<String>> entry : aliasMap.entrySet()) {
         if (entry.getValue().contains(col)) {
@@ -415,47 +395,47 @@ public class UnionQueryWriter {
 
       // Select phrase is dimension
       if (!phrase.hasMeasures(cubeql)) {
-        for (StorageCandidate sc : storageCandidates) {
+        for (StorageCandidateHQLContext sc : storageCandidates) {
           ASTNode exprWithOutAlias = (ASTNode) 
sc.getQueryAst().getSelectAST().getChild(i).getChild(0);
-          storageCandidateToSelectAstMap.get(sc.toString()).
+          
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString()).
               addChild(getSelectExpr(exprWithOutAlias, aliasNode, false));
         }
 
         // Select phrase is measure
       } else if (!phrase.getQueriedMsrs().isEmpty()) {
-        for (StorageCandidate sc : storageCandidates) {
-          if 
(sc.getAnswerableMeasurePhraseIndices().contains(phrase.getPosition())) {
+        for (StorageCandidateHQLContext sc : storageCandidates) {
+          if 
(sc.getStorageCandidate().getAnswerableMeasurePhraseIndices().contains(phrase.getPosition()))
 {
             ASTNode exprWithOutAlias = (ASTNode) 
sc.getQueryAst().getSelectAST().getChild(i).getChild(0);
-            storageCandidateToSelectAstMap.get(sc.toString()).
+            
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString()).
                 addChild(getSelectExpr(exprWithOutAlias, aliasNode, false));
           } else {
             ASTNode resolvedExprNode = getAggregateNodesExpression(i);
             if (isAggregateFunctionUsedInAST(resolvedExprNode)) {
-              setDefaultValueInExprForAggregateNodes(resolvedExprNode, sc);
+              setDefaultValueInExprForAggregateNodes(resolvedExprNode, 
sc.getStorageCandidate());
             } else {
               resolvedExprNode = getSelectExpr(null, null, true);
             }
-            storageCandidateToSelectAstMap.get(sc.toString()).
+            
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString()).
                 addChild(getSelectExpr(resolvedExprNode, aliasNode, false));
           }
         }
 
         // Select phrase is expression
       } else {
-        for (StorageCandidate sc : storageCandidates) {
-          if (phrase.isEvaluable(cubeql, sc)
-              || 
sc.getAnswerableMeasurePhraseIndices().contains(phrase.getPosition())) {
+        for (StorageCandidateHQLContext sc : storageCandidates) {
+          if (phrase.isEvaluable(sc.getStorageCandidate())
+              || 
sc.getStorageCandidate().getAnswerableMeasurePhraseIndices().contains(phrase.getPosition()))
 {
             ASTNode exprWithOutAlias = (ASTNode) 
sc.getQueryAst().getSelectAST().getChild(i).getChild(0);
-            storageCandidateToSelectAstMap.get(sc.toString()).
+            
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString()).
                 addChild(getSelectExpr(exprWithOutAlias, aliasNode, false));
           } else {
             ASTNode resolvedExprNode = getAggregateNodesExpression(i);
             if (isAggregateFunctionUsedInAST(resolvedExprNode)) {
-              setDefaultValueInExprForAggregateNodes(resolvedExprNode, sc);
+              setDefaultValueInExprForAggregateNodes(resolvedExprNode, 
sc.getStorageCandidate());
             } else {
               resolvedExprNode = getSelectExpr(null, null, true);
             }
-            storageCandidateToSelectAstMap.get(sc.toString()).
+            
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString()).
                 addChild(getSelectExpr(resolvedExprNode, aliasNode, false));
           }
         }
@@ -472,7 +452,7 @@ public class UnionQueryWriter {
     ASTNode outerSelectAst = new ASTNode(queryAst.getSelectAST());
     DefaultAliasDecider aliasDecider = new DefaultAliasDecider();
     int selectAliasCounter = 0;
-    for (StorageCandidate sc : storageCandidates) {
+    for (StorageCandidateHQLContext sc : storageCandidates) {
       aliasDecider.setCounter(0);
       ASTNode innerSelectAST = new ASTNode(new CommonToken(TOK_SELECT, 
"TOK_SELECT"));
       processSelectExpression(sc, outerSelectAst, innerSelectAST, 
aliasDecider);
@@ -481,9 +461,9 @@ public class UnionQueryWriter {
     queryAst.setSelectAST(outerSelectAst);
 
     // Iterate over the StorageCandidates and add non projected having columns 
in inner select ASTs
-    for (StorageCandidate sc : storageCandidates) {
+    for (StorageCandidateHQLContext sc : storageCandidates) {
       aliasDecider.setCounter(selectAliasCounter);
-      processHavingAST(sc.getQueryAst().getSelectAST(), aliasDecider, sc);
+      processHavingAST(sc.getQueryAst().getSelectAST(), aliasDecider, 
sc.getStorageCandidate());
     }
     removeRedundantProjectedPhrases();
   }
@@ -497,10 +477,10 @@ public class UnionQueryWriter {
    * @param aliasDecider
    * @throws LensException
    */
-  private void processSelectExpression(StorageCandidate sc, ASTNode 
outerSelectAst, ASTNode innerSelectAST,
+  private void processSelectExpression(StorageCandidateHQLContext sc, ASTNode 
outerSelectAst, ASTNode innerSelectAST,
       AliasDecider aliasDecider) throws LensException {
     //ASTNode selectAST = sc.getQueryAst().getSelectAST();
-    ASTNode selectAST = storageCandidateToSelectAstMap.get(sc.toString());
+    ASTNode selectAST = 
storageCandidateToSelectAstMap.get(sc.getStorageCandidate().toString());
     if (selectAST == null) {
       return;
     }
@@ -509,7 +489,7 @@ public class UnionQueryWriter {
       ASTNode child = (ASTNode) selectAST.getChild(i);
       ASTNode outerSelect = new ASTNode(child);
       ASTNode selectExprAST = (ASTNode) child.getChild(0);
-      ASTNode outerAST = getOuterAST(selectExprAST, innerSelectAST, 
aliasDecider, sc, true,
+      ASTNode outerAST = getOuterAST(selectExprAST, innerSelectAST, 
aliasDecider, sc.getStorageCandidate(), true,
           cubeql.getBaseCube().getDimAttributeNames());
       outerSelect.addChild(outerAST);
       // has an alias? add it
@@ -591,11 +571,8 @@ public class UnionQueryWriter {
           return outerAST;
         } else {
           ASTNode outerAST = getDotAST(cubeql.getCube().getName(), alias);
-          if (isSelectAst) {
-            innerToOuterSelectASTs.put(new 
HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-          } else {
-            innerToOuterHavingASTs.put(new 
HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-          }
+          (isSelectAst ? innerToOuterSelectASTs : innerToOuterHavingASTs)
+            .put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
           return outerAST;
         }
       }
@@ -616,11 +593,8 @@ public class UnionQueryWriter {
     //TODO: take care or non-transitive aggregate functions
     outerAST.addChild(new ASTNode(new CommonToken(Identifier, 
astNode.getChild(0).getText())));
     outerAST.addChild(dotAST);
-    if (isSelectAst) {
-      innerToOuterSelectASTs.put(new 
HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-    } else {
-      innerToOuterHavingASTs.put(new 
HashableASTNode(innerSelectASTWithoutAlias), outerAST);
-    }
+    (isSelectAst ? innerToOuterSelectASTs : innerToOuterHavingASTs)
+      .put(new HashableASTNode(innerSelectASTWithoutAlias), outerAST);
     return outerAST;
   }
 
@@ -693,7 +667,6 @@ public class UnionQueryWriter {
    */
   private Set<String> getAllColumnsOfNode(ASTNode node, Set<String> msrs) {
     if (node.getToken().getType() == HiveParser.DOT) {
-      String table = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, 
Identifier).toString();
       msrs.add(node.getChild(1).toString());
     }
     for (int i = 0; i < node.getChildCount(); i++) {
@@ -710,17 +683,13 @@ public class UnionQueryWriter {
    * @return
    * @throws LensException
    */
-  private String getFromString(Map<StorageCandidate, Set<Dimension>> 
factDimMap) throws LensException {
-    StringBuilder from = new StringBuilder();
+  private String getFromString() throws LensException {
     List<String> hqlQueries = new ArrayList<>();
-    for (StorageCandidate sc : storageCandidates) {
+    for (StorageCandidateHQLContext sc : storageCandidates) {
       removeAggreagateFromDefaultColumns(sc.getQueryAst().getSelectAST());
-      Set<Dimension> queriedDims = factDimMap.get(sc);
-      hqlQueries.add(sc.toHQL(queriedDims));
+      hqlQueries.add(sc.toHQL());
     }
-    return from.append(" ( ")
-        .append(StringUtils.join(" UNION ALL ", hqlQueries))
-        .append(" ) as " + cubeql.getBaseCube()).toString();
+    return hqlQueries.stream().collect(joining(" UNION ALL ", "(", ") as " + 
cubeql.getBaseCube()));
   }
 
   private void removeAggreagateFromDefaultColumns(ASTNode node) throws 
LensException {
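
The new getFromString above drops the StringBuilder/StringUtils.join construction in favour of Collectors.joining, whose delimiter/prefix/suffix form emits the surrounding parentheses and the cube alias in one pass. A small sketch of that idiom with plain strings; the sub-queries and alias below are made up for illustration, not taken from Lens.

    import static java.util.stream.Collectors.joining;

    import java.util.Arrays;
    import java.util.List;

    public class UnionFromClauseSketch {
      // Joins per-storage sub-queries into "(q1 UNION ALL q2) as <alias>".
      static String fromClause(List<String> subQueries, String alias) {
        return subQueries.stream()
          .collect(joining(" UNION ALL ", "(", ") as " + alias));
      }

      public static void main(String[] args) {
        List<String> subQueries = Arrays.asList(
          "SELECT dim1, sum(msr1) FROM c1_fact",
          "SELECT dim1, sum(msr1) FROM c2_fact");
        // Prints: (SELECT ... UNION ALL SELECT ...) as basecube
        System.out.println(fromClause(subQueries, "basecube"));
      }
    }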

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index aab671e..1154c3f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -144,22 +144,11 @@ public class AutoJoinContext {
       for (TableRelationship edge : path.getEdges()) {
         AbstractCubeTable fromTable = edge.getFromTable();
         String fromColumn = edge.getFromColumn();
-        List<String> columnsOfFromTable = fromPathColumns.get(fromTable);
-        if (columnsOfFromTable == null) {
-          columnsOfFromTable = new ArrayList<>();
-          fromPathColumns.put(fromTable, columnsOfFromTable);
-        }
-        columnsOfFromTable.add(fromColumn);
-
+        fromPathColumns.computeIfAbsent(fromTable, k -> new 
ArrayList<>()).add(fromColumn);
         // Similarly populate for the 'to' table
         AbstractCubeTable toTable = edge.getToTable();
         String toColumn = edge.getToColumn();
-        List<String> columnsOfToTable = toPathColumns.get(toTable);
-        if (columnsOfToTable == null) {
-          columnsOfToTable = new ArrayList<>();
-          toPathColumns.put(toTable, columnsOfToTable);
-        }
-        columnsOfToTable.add(toColumn);
+        toPathColumns.computeIfAbsent(toTable, k -> new 
ArrayList<>()).add(toColumn);
       }
     }
   }
@@ -169,24 +158,25 @@ public class AutoJoinContext {
     joinPathFromColumns.remove(dim);
   }
 
-  public String getFromString(String fromTable, StorageCandidate sc, 
Set<Dimension> qdims,
-    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql, 
QueryAST ast) throws LensException {
+  public String getFromString(String fromTable, DimHQLContext sc,
+    Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql) throws 
LensException {
     String fromString = fromTable;
+    Set<Dimension> qdims = dimsToQuery.keySet();
     log.info("All paths dump:{} Queried dims:{}", 
cubeql.getAutoJoinCtx().getAllPaths(), qdims);
-    if (qdims == null || qdims.isEmpty()) {
+    if (qdims.isEmpty()) {
       return fromString;
     }
     // Compute the merged join clause string for the min cost joinClause
-    String clause = getMergedJoinClause(cubeql, sc, ast,
-      cubeql.getAutoJoinCtx().getJoinClause(sc), dimsToQuery);
+    String clause = getMergedJoinClause(cubeql, sc,
+      cubeql.getAutoJoinCtx().getJoinClause(sc.getStorageCandidate()), 
dimsToQuery);
 
     fromString += clause;
     return fromString;
   }
 
   // Some refactoring needed to account for multiple join paths
-  public String getMergedJoinClause(CubeQueryContext cubeql, StorageCandidate 
sc, QueryAST ast, JoinClause joinClause,
-                                    Map<Dimension, CandidateDim> dimsToQuery) 
throws LensException {
+  public String getMergedJoinClause(CubeQueryContext cubeql, DimHQLContext sc, 
JoinClause joinClause,
+    Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     Set<String> clauses = new LinkedHashSet<>();
     String joinTypeStr = "";
     JoinType joinType = JoinType.INNER;
@@ -198,7 +188,7 @@ public class AutoJoinContext {
 
     Iterator<JoinTree> iter = joinClause.getJoinTree().dft();
     boolean hasBridgeTable = false;
-    BridgeTableJoinContext bridgeTableJoinContext = new 
BridgeTableJoinContext(cubeql, sc, ast, bridgeTableFieldAggr,
+    BridgeTableJoinContext bridgeTableJoinContext = new 
BridgeTableJoinContext(cubeql, sc, bridgeTableFieldAggr,
       bridgeTableFieldArrayFilter, doFlatteningEarly);
 
     while (iter.hasNext()) {
@@ -352,36 +342,30 @@ public class AutoJoinContext {
    * Same is done in case of join paths defined in Dimensions.
    *
    * @param cube
-   * @param scSet picked StorageCandidates
+
    * @param dimsToQuery
    * @throws LensException
    */
-  public void pruneAllPaths(CubeInterface cube, Collection<StorageCandidate> 
scSet,
+  public void pruneAllPaths(CubeInterface cube, Collection<String> candColumns,
     final Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     // Remove join paths which cannot be satisfied by the resolved candidate
     // fact and dimension tables
-    if (scSet != null) {
-      // include columns from picked candidate
-      Set<String> candColumns = new HashSet<>();
-      for (StorageCandidate sc : scSet) {
-        candColumns.addAll(sc.getColumns());
-      }
-      for (List<JoinPath> paths : allPaths.values()) {
-        for (int i = 0; i < paths.size(); i++) {
-          JoinPath jp = paths.get(i);
-          List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) 
cube);
-          if (cubeCols != null && !candColumns.containsAll(cubeCols)) {
-            // This path requires some columns from the cube which are not
-            // present in the candidate fact
-            // Remove this path
-            log.info("Removing join path:{} as columns :{} dont exist", jp, 
cubeCols);
-            paths.remove(i);
-            i--;
-          }
+    // include columns from picked candidate
+    for (List<JoinPath> paths : allPaths.values()) {
+      for (int i = 0; i < paths.size(); i++) {
+        JoinPath jp = paths.get(i);
+        List<String> cubeCols = jp.getColumnsForTable((AbstractCubeTable) 
cube);
+        if (cubeCols != null && !candColumns.containsAll(cubeCols)) {
+          // This path requires some columns from the cube which are not
+          // present in the candidate fact
+          // Remove this path
+          log.info("Removing join path:{} as columns :{} dont exist", jp, 
cubeCols);
+          paths.remove(i);
+          i--;
         }
       }
-      pruneEmptyPaths(allPaths);
     }
+    pruneEmptyPaths(allPaths);
     pruneAllPaths(dimsToQuery);
   }
 
@@ -442,7 +426,7 @@ public class AutoJoinContext {
 
   private Map<Aliased<Dimension>, List<JoinPath>> pruneFactPaths(CubeInterface 
cube,
     final StorageCandidate sc) throws LensException {
-    Map<Aliased<Dimension>, List<JoinPath>> prunedPaths = new HashMap<>();
+    Map<Aliased<Dimension>, List<JoinPath>> prunedPaths = new 
LinkedHashMap<>();
     // Remove join paths which cannot be satisfied by the candidate fact
     for (Map.Entry<Aliased<Dimension>, List<JoinPath>> ppaths : 
allPaths.entrySet()) {
       prunedPaths.put(ppaths.getKey(), new ArrayList<>(ppaths.getValue()));
@@ -581,7 +565,7 @@ public class AutoJoinContext {
     }
   }
 
-  public Set<Dimension> pickOptionalTables(final StorageCandidate sc,
+  public Set<Dimension> pickOptionalTables(final DimHQLContext sc,
     Set<Dimension> qdims, CubeQueryContext cubeql) throws LensException {
     // Find the min cost join clause and add dimensions in the clause as 
optional dimensions
     Set<Dimension> joiningOptionalTables = new HashSet<>();
@@ -589,7 +573,7 @@ public class AutoJoinContext {
       return joiningOptionalTables;
     }
     // find least cost path
-    Iterator<JoinClause> itr = getJoinClausesForAllPaths(sc, qdims, cubeql);
+    Iterator<JoinClause> itr = 
getJoinClausesForAllPaths(sc.getStorageCandidate(), qdims, cubeql);
     JoinClause minCostClause = null;
     while (itr.hasNext()) {
       JoinClause clause = itr.next();
@@ -604,10 +588,10 @@ public class AutoJoinContext {
     }
 
     log.info("Fact: {} minCostClause:{}", sc, minCostClause);
-    if (sc != null) {
-      cubeql.getAutoJoinCtx().getFactClauses().put(sc, minCostClause);
+    if (sc.getStorageCandidate() != null) {
+      getFactClauses().put(sc.getStorageCandidate(), minCostClause);
     } else {
-      cubeql.getAutoJoinCtx().setMinCostClause(minCostClause);
+      setMinCostClause(minCostClause);
     }
     for (Dimension dim : minCostClause.getDimsInPath()) {
       if (!qdims.contains(dim)) {
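
This file and JoinClause.java below make the same change: the look-up/null-check/put multimap idiom collapses into Map.computeIfAbsent, which creates the bucket on first access and always returns it. A minimal before/after sketch using plain strings rather than Lens table types:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MultimapSketch {
      // Old shape: look up the bucket, create it on a miss, then add to it.
      static void addOld(Map<String, List<String>> columnsByTable, String table, String column) {
        List<String> columns = columnsByTable.get(table);
        if (columns == null) {
          columns = new ArrayList<>();
          columnsByTable.put(table, columns);
        }
        columns.add(column);
      }

      // New shape: computeIfAbsent creates and returns the bucket in one call.
      static void addNew(Map<String, List<String>> columnsByTable, String table, String column) {
        columnsByTable.computeIfAbsent(table, k -> new ArrayList<>()).add(column);
      }

      public static void main(String[] args) {
        Map<String, List<String>> columnsByTable = new HashMap<>();
        addOld(columnsByTable, "citytable", "id");
        addNew(columnsByTable, "citytable", "name");
        System.out.println(columnsByTable); // {citytable=[id, name]}
      }
    }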

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
index ab5c4f9..5d32bbd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/BridgeTableJoinContext.java
@@ -41,8 +41,7 @@ public class BridgeTableJoinContext {
   private final String bridgeTableFieldAggr;
   private final String arrayFilter;
   private final CubeQueryContext cubeql;
-  private final StorageCandidate sc;
-  private final QueryAST queryAST;
+  private final DimHQLContext sc;
   private final boolean doFlatteningEarly;
   private boolean initedBridgeClauses = false;
   private final StringBuilder bridgeSelectClause = new StringBuilder();
@@ -51,10 +50,9 @@ public class BridgeTableJoinContext {
   private final StringBuilder bridgeJoinClause = new StringBuilder();
   private final StringBuilder bridgeGroupbyClause = new StringBuilder();
 
-  public BridgeTableJoinContext(CubeQueryContext cubeql, StorageCandidate sc, 
QueryAST queryAST,
+  public BridgeTableJoinContext(CubeQueryContext cubeql, DimHQLContext sc,
     String bridgeTableFieldAggr, String arrayFilter, boolean 
doFlatteningEarly) {
     this.cubeql = cubeql;
-    this.queryAST = queryAST;
     this.sc = sc;
     this.bridgeTableFieldAggr = bridgeTableFieldAggr;
     this.arrayFilter = arrayFilter;
@@ -236,7 +234,7 @@ public class BridgeTableJoinContext {
       }
     }
 
-    void processWhereClauses(StorageCandidate sc) throws LensException {
+    void processWhereClauses(DimHQLContext sc) throws LensException {
       processWhereAST(sc.getQueryAst().getWhereAST(), null, 0);
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
index 4325252..8661496 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/JoinClause.java
@@ -53,19 +53,8 @@ public class JoinClause implements Comparable<JoinClause> {
   void initChainColumns() {
     for (List<TableRelationship> path : chain.values()) {
       for (TableRelationship edge : path) {
-        Set<String> fcols = chainColumns.get(edge.getFromTable());
-        if (fcols == null) {
-          fcols = new HashSet<>();
-          chainColumns.put(edge.getFromTable(), fcols);
-        }
-        fcols.add(edge.getFromColumn());
-
-        Set<String> tocols = chainColumns.get(edge.getToTable());
-        if (tocols == null) {
-          tocols = new HashSet<>();
-          chainColumns.put(edge.getToTable(), tocols);
-        }
-        tocols.add(edge.getToColumn());
+        chainColumns.computeIfAbsent(edge.getFromTable(), k -> new 
HashSet<>()).add(edge.getFromColumn());
+        chainColumns.computeIfAbsent(edge.getToTable(), k -> new 
HashSet<>()).add(edge.getToColumn());
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
index 3a17499..a76ef13 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestCubeMetastoreClient.java
@@ -63,7 +63,6 @@ import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-
 public class TestCubeMetastoreClient {
 
   private static CubeMetastoreClient client;

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
index 8b3b4ba..7b67b9c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/metadata/TestDateUtil.java
@@ -286,13 +286,13 @@ public class TestDateUtil {
     Date now = new Date();
     Date nowDay = DateUtils.truncate(now, DAY_OF_MONTH);
     Date nowDayMinus2Days = DateUtils.add(nowDay, DAY_OF_MONTH, -2);
-    assertEquals(relativeToAbsolute("now", now), 
DateUtil.ABSDATE_PARSER.get().format(now));
-    assertEquals(relativeToAbsolute("now.day", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDay));
-    assertEquals(relativeToAbsolute("now.day - 2 days", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2 day", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day - 2day", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 day", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
-    assertEquals(relativeToAbsolute("now.day -2 days", now), 
DateUtil.ABSDATE_PARSER.get().format(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now", now), DateUtil.formatAbsDate(now));
+    assertEquals(relativeToAbsolute("now.day", now), 
DateUtil.formatAbsDate(nowDay));
+    assertEquals(relativeToAbsolute("now.day - 2 days", now), 
DateUtil.formatAbsDate(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2 day", now), 
DateUtil.formatAbsDate(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day - 2day", now), 
DateUtil.formatAbsDate(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 day", now), 
DateUtil.formatAbsDate(nowDayMinus2Days));
+    assertEquals(relativeToAbsolute("now.day -2 days", now), 
DateUtil.formatAbsDate(nowDayMinus2Days));
   }
   @Test
   public void testTimestamp() throws LensException {
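
The test above switches from DateUtil.ABSDATE_PARSER.get().format(...) to a DateUtil.formatAbsDate(...) helper. A common shape for such a helper is sketched below; the class name and the date pattern are assumptions for illustration, not the actual Lens DateUtil source.

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public final class AbsDateFormatSketch {
      // SimpleDateFormat is not thread-safe, so keep one instance per thread.
      // The pattern here is an assumed placeholder, not necessarily Lens's format.
      private static final ThreadLocal<SimpleDateFormat> ABSDATE_PARSER =
          ThreadLocal.withInitial(() -> new SimpleDateFormat("yyyy-MM-dd-HH:mm:ss,SSS"));

      private AbsDateFormatSketch() {
      }

      // Callers format through this helper instead of reaching into the ThreadLocal.
      public static String formatAbsDate(Date date) {
        return ABSDATE_PARSER.get().format(date);
      }
    }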

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 033264c..cdd101b 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -22,10 +22,21 @@ package org.apache.lens.cube.parse;
 import static java.util.Calendar.DAY_OF_MONTH;
 import static java.util.Calendar.HOUR_OF_DAY;
 
-import static org.apache.lens.cube.metadata.DateFactory.*;
-import static org.apache.lens.cube.metadata.UpdatePeriod.*;
-
-import static org.testng.Assert.*;
+import static org.apache.lens.cube.metadata.DateFactory.BEFORE_4_DAYS;
+import static org.apache.lens.cube.metadata.DateFactory.BEFORE_6_DAYS;
+import static org.apache.lens.cube.metadata.DateFactory.NOW;
+import static org.apache.lens.cube.metadata.DateFactory.TWODAYS_BACK;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_MONTHS_BACK;
+import static org.apache.lens.cube.metadata.DateFactory.isZerothHour;
+import static org.apache.lens.cube.metadata.UpdatePeriod.DAILY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.HOURLY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.MINUTELY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.MONTHLY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.QUARTERLY;
+import static org.apache.lens.cube.metadata.UpdatePeriod.YEARLY;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
 
 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -34,7 +45,21 @@ import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.StringReader;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.function.Function;
 import java.util.stream.Collectors;
 
 import javax.xml.bind.JAXBException;
@@ -42,7 +67,23 @@ import javax.xml.bind.JAXBException;
 import org.apache.lens.api.ToXMLString;
 import org.apache.lens.api.jaxb.LensJAXBContext;
 import org.apache.lens.api.metastore.SchemaTraverser;
-import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.CubeDimAttribute;
+import org.apache.lens.cube.metadata.CubeDimensionTable;
+import org.apache.lens.cube.metadata.CubeFactTable;
+import org.apache.lens.cube.metadata.CubeMeasure;
+import org.apache.lens.cube.metadata.CubeMetastoreClient;
+import org.apache.lens.cube.metadata.CubeTableType;
+import org.apache.lens.cube.metadata.DateUtil;
+import org.apache.lens.cube.metadata.JAXBUtils;
+import org.apache.lens.cube.metadata.MetastoreConstants;
+import org.apache.lens.cube.metadata.MetastoreUtil;
+import org.apache.lens.cube.metadata.Storage;
+import org.apache.lens.cube.metadata.StorageConstants;
+import org.apache.lens.cube.metadata.StoragePartitionDesc;
+import org.apache.lens.cube.metadata.TestCubeMetastoreClient;
+import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.TimePartitionRange;
+import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
 import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
@@ -110,6 +151,7 @@ public class CubeTestSetup {
   private static Map<String, String> factValidityProperties = 
Maps.newHashMap();
   @Getter
   private static Map<String, List<UpdatePeriod>> storageToUpdatePeriodMap = 
new LinkedHashMap<>();
+
   static {
     factValidityProperties.put(MetastoreConstants.FACT_RELATIVE_START_TIME, 
"now.year - 90 days");
   }
@@ -144,6 +186,7 @@ public class CubeTestSetup {
     return sb.append(") ").append(" as ").append(cubeName).append(" 
").append(outerWhere == null ? "" : outerWhere)
       .append(" ").append(outerPostWhere == null ? "" : 
outerPostWhere).toString();
   }
+
   public static String getExpectedUnionQuery(String cubeName, List<String> 
storages, StoragePartitionProvider provider,
     String outerSelectPart, String outerWhere, String outerPostWhere, String 
innerQuerySelectPart,
     String innerWhere, String innerPostWhere) {
@@ -453,6 +496,16 @@ public class CubeTestSetup {
     return storageTableToWhereClause;
   }
 
+  public static Map<String, String> getWhereForUpdatePeriods(String cubeName, 
String table, Date start, Date end,
+    Set<UpdatePeriod> updatePeriods) {
+    Map<String, String> storageTableToWhereClause = new LinkedHashMap<>();
+    List<String> parts = new ArrayList<>();
+    addParts(parts, updatePeriods, start, end);
+    storageTableToWhereClause.put(getDbName() + table,
+      StorageUtil.getWherePartClause("dt", cubeName, parts));
+    return storageTableToWhereClause;
+  }
+
   public static Map<String, String> getWhereForMonthly(String monthlyTable, 
Date startMonth, Date endMonth) {
     Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, 
String>();
     List<String> parts = new ArrayList<String>();
@@ -475,14 +528,26 @@ public class CubeTestSetup {
   }
 
   public static void addParts(Collection<String> partitions, UpdatePeriod 
updatePeriod, Date from, Date to) {
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(from);
-    Date dt = cal.getTime();
-    while (dt.before(to)) {
-      String part = updatePeriod.format(dt);
-      cal.add(updatePeriod.calendarField(), 1);
-      partitions.add(part);
-      dt = cal.getTime();
+    try {
+      for (TimePartition timePartition : TimePartitionRange.between(from, to, 
updatePeriod)) {
+        partitions.add(timePartition.toString());
+      }
+    } catch (LensException e) {
+      throw new IllegalArgumentException(e);
+    }
+  }
+
+  public static void addParts(Collection<String> partitions, Set<UpdatePeriod> 
updatePeriods, Date from, Date to) {
+    if (updatePeriods.size() != 0) {
+      UpdatePeriod max = CubeFactTable.maxIntervalInRange(from, to, 
updatePeriods);
+      if (max != null) {
+        updatePeriods.remove(max);
+        Date ceilFromDate = DateUtil.getCeilDate(from, max);
+        Date floorToDate = DateUtil.getFloorDate(to, max);
+        addParts(partitions, updatePeriods, from, ceilFromDate);
+        addParts(partitions, max, ceilFromDate, floorToDate);
+        addParts(partitions, updatePeriods, floorToDate, to);
+      }
     }
   }
 
@@ -741,7 +806,7 @@ public class CubeTestSetup {
       throw exc;
     }
   }
-  StrSubstitutor substitutor = new StrSubstitutor(new StrLookup<String>() {
+  private static final StrSubstitutor GREGORIAN_SUBSTITUTOR = new 
StrSubstitutor(new StrLookup<String>() {
     @Override
     public String lookup(String s) {
       try {
@@ -751,7 +816,7 @@ public class CubeTestSetup {
       }
     }
   }, "$gregorian{", "}", '$');
-  StrSubstitutor substitutor2 = new StrSubstitutor(new StrLookup<String>() {
+  private static final StrSubstitutor ABSOLUTE_SUBSTITUTOR = new 
StrSubstitutor(new StrLookup<String>() {
     @Override
     public String lookup(String s) {
       try {
@@ -763,10 +828,11 @@ public class CubeTestSetup {
   }, "$absolute{", "}", '$');
   private void createFromXML(CubeMetastoreClient client) {
     SchemaTraverser.SchemaEntityProcessor processor = (file, aClass) -> {
+      Function<String, String> f = GREGORIAN_SUBSTITUTOR::replace;
+      Function<String, String> g = ABSOLUTE_SUBSTITUTOR::replace;
       try {
         BufferedReader br = new BufferedReader(new FileReader(file));
-        String replaced = 
br.lines().map(s->substitutor2.replace(substitutor.replace(s)))
-          .collect(Collectors.joining("\n"));
+        String replaced = 
br.lines().map(f.andThen(g)).collect(Collectors.joining("\n"));
         StringReader sr = new StringReader(replaced);
         client.createEntity(LensJAXBContext.unmarshall(sr));
       } catch (LensException | JAXBException | IOException e) {
@@ -810,6 +876,7 @@ public class CubeTestSetup {
 //    }
   }
 
+
   public void dropSources(HiveConf conf, String dbName) throws Exception {
     Hive metastore = Hive.get(conf);
     metastore.dropDatabase(dbName, true, true, true);
@@ -833,6 +900,7 @@ public class CubeTestSetup {
     CubeFactTable fact4 = client.getFactTable(factName);
     createPIEParts(client, fact4, c2);
   }
+
   private void createBaseCubeFactPartitions(CubeMetastoreClient client) throws 
HiveException, LensException {
     String factName = "testFact5_RAW_BASE";
     CubeFactTable fact = client.getFactTable(factName);
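
In createFromXML above, the nested substitutor2.replace(substitutor.replace(s)) call becomes f.andThen(g) so the two replacements read in application order. A short sketch of that composition over lines of text; the two replacement functions are made-up stand-ins for GREGORIAN_SUBSTITUTOR::replace and ABSOLUTE_SUBSTITUTOR::replace.

    import java.util.function.Function;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ComposeReplacementsSketch {
      public static void main(String[] args) {
        // Hypothetical replacements; the real ones delegate to StrSubstitutor.
        Function<String, String> f = s -> s.replace("$gregorian{now.year}", "2017-01-01");
        Function<String, String> g = s -> s.replace("$absolute{now.day}", "2017-06-01");

        // f.andThen(g) applies f first, then g: g.apply(f.apply(line)).
        String replaced = Stream.of(
            "<start_time>$gregorian{now.year}</start_time>",
            "<end_time>$absolute{now.day}</end_time>")
          .map(f.andThen(g))
          .collect(Collectors.joining("\n"));
        System.out.println(replaced);
      }
    }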

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index 93dbfc3..f87158c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -48,7 +48,6 @@ import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
-import com.google.common.base.Splitter;
 import lombok.Getter;
 
 public class TestBaseCubeQueries extends TestQueryRewrite {
@@ -93,14 +92,6 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
 
   }
 
-  private void compareStrings(List<String> factTablesList, Map.Entry<String, 
List<CandidateTablePruneCause>> entry) {
-    String factTablesString = entry.getKey();
-    Iterable<String> factTablesIterator = 
Splitter.on(',').split(factTablesString);
-    for (String factTable : factTablesIterator) {
-      Assert.assertTrue(factTablesList.contains(factTable), "Not selecting" + 
factTable + "fact table");
-    }
-  }
-
   @Test
   public void testCommonDimensions() throws Exception {
     String hqlQuery = rewrite("select dim1, SUM(msr1) from basecube" + " where 
" + TWO_DAYS_RANGE, conf);
@@ -145,7 +136,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
   public void testMultiFactQueryWithNoDimensionsSelected() throws Exception {
     CubeQueryContext ctx = rewriteCtx("select roundedmsr2, msr12 from 
basecube" + " where " + TWO_DAYS_RANGE, conf);
     Set<String> storageCandidates = new HashSet<String>();
-    Set<StorageCandidate> scSet = 
CandidateUtil.getStorageCandidates(ctx.getCandidates());
+    Collection<StorageCandidate> scSet = 
CandidateUtil.getStorageCandidates(ctx.getCandidates());
     for (StorageCandidate sc : scSet) {
       storageCandidates.add(sc.getStorageTable());
     }
@@ -171,7 +162,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     CubeQueryContext ctx = rewriteCtx("select roundedmsr2, msr14, msr12 from 
basecube" + " where " + TWO_DAYS_RANGE,
       conf);
     Set<String> storageCandidates = new HashSet<String>();
-    Set<StorageCandidate> scSet = 
CandidateUtil.getStorageCandidates(ctx.getCandidates());
+    Collection<StorageCandidate> scSet = 
CandidateUtil.getStorageCandidates(ctx.getCandidates());
     for (StorageCandidate sc : scSet) {
       storageCandidates.add(sc.getStorageTable());
     }
@@ -785,9 +776,8 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     // If going to fallback timedim, and partitions are missing, then error 
should be missing partition on that
     conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C4");
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
-    LensException exc =
+    NoCandidateFactAvailableException ne =
       getLensExceptionInRewrite("select msr12 from basecube where " + 
TWO_DAYS_RANGE, conf);
-    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) 
exc;
     PruneCauses.BriefAndDetailedError pruneCause = ne.getJsonMessage();
     assertTrue(pruneCause.getBrief().contains("Missing partitions"), 
pruneCause.getBrief());
     
assertEquals(pruneCause.getDetails().get("c4_testfact2_base").iterator().next().getCause(),
 MISSING_PARTITIONS);
@@ -827,17 +817,20 @@ public class TestBaseCubeQueries extends TestQueryRewrite 
{
     StorageCandidate sc = 
CandidateUtil.getStorageCandidates(ctx.getCandidates().iterator().next()).iterator().next();
     assertEquals(sc.getRangeToPartitions().size(), 2);
     for(TimeRange range: sc.getRangeToPartitions().keySet()) {
-      String rangeWhere = 
CandidateUtil.getTimeRangeWhereClasue(ctx.getRangeWriter(), sc, range);
-      if (range.getPartitionColumn().equals("dt")) {
+      String rangeWhere = sc.getTimeRangeWhereClasue(ctx.getRangeWriter(), 
range);
+      switch (range.getPartitionColumn()) {
+      case "dt":
         ASTNode parsed = HQLParser.parseExpr(rangeWhere);
         assertEquals(parsed.getToken().getType(), KW_AND);
         assertTrue(rangeWhere.substring(((CommonToken) 
parsed.getToken()).getStopIndex() + 1)
           .toLowerCase().contains(dTimeWhereClause));
         assertFalse(rangeWhere.substring(0, ((CommonToken) 
parsed.getToken()).getStartIndex())
           .toLowerCase().contains("and"));
-      } else if (range.getPartitionColumn().equals("ttd")) {
+        break;
+      case "ttd":
         assertFalse(rangeWhere.toLowerCase().contains("and"));
-      } else {
+        break;
+      default:
         throw new LensException("Unexpected");
       }
     }
@@ -989,8 +982,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
     assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as 
`dim1`, "
         + "(basecube.alias1) as `dim11` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL") && hqlQuery.endsWith("HAVING 
((sum((basecube.alias2)) > 2) "
-        + "and (round((sum((basecube.alias3)) / 1000)) > 0) and 
(sum((basecube.alias3)) > 100))"), hqlQuery);
-
+      + "and (round((sum((basecube.alias3)) / 1000)) > 0) and 
(sum((basecube.alias3)) > 100))"), hqlQuery);
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
       + "having msr12+roundedmsr2 <= 1000", conf);
     expected1 = getExpectedQuery(cubeName,
@@ -1025,14 +1017,14 @@ public class TestBaseCubeQueries extends 
TestQueryRewrite {
       null, " group by basecube.dim1, basecube.dim11",
       getWhereForDailyAndHourly2days(cubeName, "C1_testFact1_BASE"));
 
+
     compareContains(expected1, hqlQuery);
     compareContains(expected2, hqlQuery);
     assertTrue(hqlQuery.toLowerCase().startsWith("select (basecube.alias0) as 
`dim1`, (basecube.alias1) "
         + "as `dim11` from "), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
       && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) > 2) and 
(round((sum((basecube.alias3)) / 1000)) > 0) "
-        + "and ((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 
1000))) <= 1000))"), hqlQuery);
-
+      + "and ((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 
1000))) <= 1000))"), hqlQuery);
 
     hqlQuery = rewrite("select dim1, dim11 from basecube where " + 
TWO_DAYS_RANGE
       + "having msr12 > 2 or roundedmsr2 > 0 or msr12+roundedmsr2 <= 1000", 
conf);
@@ -1053,6 +1045,6 @@ public class TestBaseCubeQueries extends TestQueryRewrite 
{
         + "as `dim11` from"), hqlQuery);
     assertTrue(hqlQuery.contains("UNION ALL")
       && hqlQuery.endsWith("HAVING ((sum((basecube.alias2)) > 2) or 
(round((sum((basecube.alias3)) / 1000)) > 0) "
-        + "or ((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 
1000))) <= 1000))"), hqlQuery);
+      + "or ((sum((basecube.alias2)) + round((sum((basecube.alias3)) / 1000))) 
<= 1000))"), hqlQuery);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index bab1080..7d1f80d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -19,6 +19,8 @@
 
 package org.apache.lens.cube.parse;
 
+import static 
org.apache.lens.cube.error.LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE;
+import static org.apache.lens.cube.error.LensCubeErrorCode.NO_FACT_HAS_COLUMN;
 import static org.apache.lens.cube.metadata.DateFactory.*;
 import static org.apache.lens.cube.metadata.UpdatePeriod.*;
 import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
@@ -114,9 +116,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     //from date 6 days back
     timeRangeString = getTimeRangeString(DAILY, -6, 0, qFmt);
-    LensException th = getLensExceptionInRewrite("select SUM(msr15) from 
testCube where "
+    NoCandidateFactAvailableException th = getLensExceptionInRewrite("select 
SUM(msr15) from testCube where "
       + timeRangeString, getConf());
-    assertEquals(th.getErrorCode(), 
LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
 
   @Test
@@ -152,7 +153,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
     conf.set(DRIVER_SUPPORTED_STORAGES, "C1,C2,C4");
     CubeQueryContext cubeQueryContext =
       rewriteCtx("select SUM(msr2) from testCube where " + THIS_YEAR_RANGE, 
conf);
-    PruneCauses<StorageCandidate> pruneCause = 
cubeQueryContext.getStoragePruningMsgs();
+    PruneCauses<Candidate> pruneCause = 
cubeQueryContext.getStoragePruningMsgs();
     long lessDataCauses = pruneCause.values().stream()
       
.flatMap(Collection::stream).map(CandidateTablePruneCause::getCause).filter(LESS_DATA::equals).count();
     assertTrue(lessDataCauses > 0);
@@ -178,10 +179,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     conf.setBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, true);
 
-    LensException th = getLensExceptionInRewrite(
+    NoCandidateFactAvailableException ne = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + TWO_DAYS_RANGE, conf);
-    assertEquals(th.getErrorCode(), 
LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) 
th;
     PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
     int endIndex = MISSING_PARTITIONS.errorFormat.length() - 3;
     assertEquals(
@@ -629,7 +628,7 @@ public class TestCubeRewriter extends TestQueryRewrite {
         + TWO_DAYS_RANGE, conf);
     String expected =
       getExpectedQuery(TEST_CUBE_NAME, "select citydim.name as `name`, 
sum(testcube.msr2) as `sum(msr2)` FROM "
-          , "INNER JOIN " + getDbName() + "c2_citytable citydim ON" + " 
testCube.cityid = citydim.id",
+          , " INNER JOIN " + getDbName() + "c2_citytable citydim ON" + " 
testCube.cityid = citydim.id",
           null, " group by citydim.name ",
         null, getWhereForDailyAndHourly2days(TEST_CUBE_NAME, "C2_testfact"));
     compareQueries(hqlQuery, expected);
@@ -872,8 +871,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     String expectedRewrittenQuery = "SELECT (citydim.name) as `Alias With  
Spaces`, sum((testcube.msr2)) "
       + "as `TestMeasure` FROM TestQueryRewrite.c2_testfact testcube inner 
JOIN TestQueryRewrite.c2_citytable citydim "
-      + "ON ((testcube.cityid) = (citydim.id)) WHERE ((((testcube.dt) = '"
-      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "'))) GROUP BY 
(citydim.name)";
+      + "ON ((testcube.cityid) = (citydim.id)) WHERE ((testcube.dt) = '"
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "') GROUP BY 
(citydim.name)";
 
     String actualRewrittenQuery = rewrite(inputQuery, 
getConfWithStorages("C2"));
 
@@ -889,8 +888,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
 
     String expectedRewrittenQuery = "SELECT (citydim.name) as `Alias With  
Spaces`, sum((testcube.msr2)) "
       + "as `TestMeasure` FROM TestQueryRewrite.c2_testfact testcube inner 
JOIN TestQueryRewrite.c2_citytable citydim "
-      + "ON ((testcube.cityid) = (citydim.id)) WHERE ((((testcube.dt) = '"
-      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "'))) GROUP BY 
(citydim.name)";
+      + "ON ((testcube.cityid) = (citydim.id)) WHERE ((testcube.dt) = '"
+      + getDateUptoHours(getDateWithOffset(HOURLY, -1)) + "') GROUP BY 
(citydim.name)";
 
     String actualRewrittenQuery = rewrite(inputQuery, 
getConfWithStorages("C2"));
 
@@ -997,10 +996,8 @@ public class TestCubeRewriter extends TestQueryRewrite {
     Configuration conf = getConf();
     conf.setBoolean(CubeQueryConfUtil.FAIL_QUERY_ON_PARTIAL_DATA, true);
 
-    LensException e = getLensExceptionInRewrite(
+    NoCandidateFactAvailableException ne = getLensExceptionInRewrite(
       "select SUM(msr2) from testCube" + " where " + 
TWO_MONTHS_RANGE_UPTO_HOURS, conf);
-    assertEquals(e.getErrorCode(), 
LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
-    NoCandidateFactAvailableException ne = (NoCandidateFactAvailableException) 
e;
     PruneCauses.BriefAndDetailedError pruneCauses = ne.getJsonMessage();
 
     assertEquals(
@@ -1615,16 +1612,13 @@ public class TestCubeRewriter extends TestQueryRewrite {
     assertTrue(hql1.contains("c1_testfact4_raw_base"));
     // Start time for dim attribute user_id_added_far_future is 2099-01-01
     String query2 = "select user_id_added_far_future from basecube where " + 
TWO_DAYS_RANGE;
-    LensException e1 = getLensExceptionInRewrite(query2, getConf());
-    assertTrue(e1.getMessage().contains("NO_FACT_HAS_COLUMN"));
+    assertLensExceptionInRewrite(query2, getConf(), NO_FACT_HAS_COLUMN);
     // End time for dim attribute user_id_deprecated is 2016-01-01
     String query3 = "select user_id_deprecated from basecube where " + 
TWO_DAYS_RANGE;
-    LensException e2 = getLensExceptionInRewrite(query3, getConf());
-    assertTrue(e2.getMessage().contains("NO_FACT_HAS_COLUMN"));
+    assertLensExceptionInRewrite(query3, getConf(), NO_FACT_HAS_COLUMN);
     // Start time for ref column user_id_added_far_future_chain is 2099-01-01
     String query4 = "select user_id_added_far_future_chain.name from basecube 
where " + TWO_DAYS_RANGE;
-    LensException e3 = getLensExceptionInRewrite(query4, getConf());
-    assertTrue(e3.getMessage().contains("NO_FACT_HAS_COLUMN"));
+    assertLensExceptionInRewrite(query4, getConf(), 
NO_CANDIDATE_FACT_AVAILABLE);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeSegmentationRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeSegmentationRewriter.java
 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeSegmentationRewriter.java
new file mode 100644
index 0000000..7f8662a
--- /dev/null
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeSegmentationRewriter.java
@@ -0,0 +1,322 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+import static org.apache.lens.cube.metadata.DateFactory.NOW;
+import static org.apache.lens.cube.metadata.DateFactory.TWO_DAYS_RANGE;
+import static 
org.apache.lens.cube.metadata.DateFactory.TWO_MONTHS_RANGE_UPTO_DAYS;
+import static org.apache.lens.cube.metadata.DateFactory.getDateWithOffset;
+import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.SEGMENTATION_PRUNED;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.DISABLE_AUTO_JOINS;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY;
+import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.RESOLVE_SEGMENTATIONS;
+import static org.apache.lens.cube.parse.CubeTestSetup.getDbName;
+import static org.apache.lens.cube.parse.CubeTestSetup.getExpectedQuery;
+import static 
org.apache.lens.cube.parse.CubeTestSetup.getWhereForDailyAndHourly2days;
+import static org.apache.lens.cube.parse.CubeTestSetup.getWhereForHourly2days;
+import static 
org.apache.lens.cube.parse.CubeTestSetup.getWhereForUpdatePeriods;
+import static org.apache.lens.cube.parse.TestCubeRewriter.compareQueries;
+
+import static org.apache.commons.lang3.time.DateUtils.addDays;
+
+import static org.testng.Assert.assertEquals;
+
+import static com.google.common.collect.Lists.newArrayList;
+
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.apache.lens.cube.error.NoCandidateFactAvailableException;
+import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.server.api.LensServerAPITestUtil;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.hadoop.conf.Configuration;
+
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import com.google.common.collect.Sets;
+import junit.framework.Assert;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class TestCubeSegmentationRewriter extends TestQueryRewrite {
+
+  private Configuration conf;
+
+  @BeforeTest
+  public void setupDriver() throws Exception {
+    conf = LensServerAPITestUtil.getConfiguration(
+      DRIVER_SUPPORTED_STORAGES, "C0,C1",
+      DISABLE_AUTO_JOINS, false,
+      ENABLE_SELECT_TO_GROUPBY, true,
+      ENABLE_GROUP_BY_TO_SELECT, true,
+      RESOLVE_SEGMENTATIONS, true,
+      DISABLE_AGGREGATE_RESOLVER, false,
+      ENABLE_FLATTENING_FOR_BRIDGETABLES, true);
+  }
+
+  @Override
+  public Configuration getConf() {
+    return new Configuration(conf);
+  }
+
+  private static String extractTableName(String query) {
+    String l = query.toLowerCase();
+    int fromIndex = l.indexOf("from");
+    int toIndex = l.indexOf(" ", fromIndex + 5);
+    return l.substring(fromIndex + 5, toIndex);
+  }
+
+  private static void compareUnionQuery(CubeQueryContext cubeql, String begin, 
String end, List<String> queries)
+    throws LensException {
+    final String actualLower = cubeql.toHQL().toLowerCase();
+    queries.sort(Comparator.comparing(s -> 
actualLower.indexOf(extractTableName(s))));
+    String expected = queries.stream().collect(Collectors.joining(" UNION ALL 
", begin, end));
+    compareQueries(actualLower, expected);
+  }
+
+  @Test
+  public void testSegmentRewrite() throws Exception {
+    CubeQueryContext ctx = rewriteCtx("select cityid, segmsr1 from testcube 
where " + TWO_DAYS_RANGE,
+      getConf());
+    String query1 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, sum(testcube.segmsr1) as alias1 FROM 
", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    String query2 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, sum(testcube.segmsr1) as alias1 FROM 
", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    compareUnionQuery(ctx,
+      "SELECT (testcube.alias0) as `cityid`, sum((testcube.alias1)) as 
`segmsr1` FROM (",
+      " ) as testcube GROUP BY (testcube.alias0)", newArrayList(query1, 
query2));
+  }
+
+  /*
+  Asking for segmsr1 from testcube. segmsr1 is available in b1b2fact and seg1 
split over time.
+   Inside seg1, Two segments are there: b1cube and b2cube. b1cube has one fact 
b1fact which
+   is split over time across two storages: c1 and c2. b2cube has one fact 
which answers complete range
+   given to it. So total 4 storage candidates should be there.
+   */
+  @Test
+  public void testFactUnionSegmentWithInnerUnion() throws Exception {
+    CubeQueryContext ctx = rewriteCtx("select cityid, segmsr1 from testcube 
where " + TWO_MONTHS_RANGE_UPTO_DAYS,
+      getConf());
+    String query1, query2, query3, query4;
+    query1 = getExpectedQuery("testcube", "select testcube.cityid as alias0, 
sum(testcube.segmsr1) as alias1 from ",
+      null, "group by testcube.cityid",
+      getWhereForUpdatePeriods("testcube", "c0_b1fact1",
+        addDays(getDateWithOffset(UpdatePeriod.MONTHLY, -1), -1), 
getDateWithOffset(UpdatePeriod.DAILY, -10),
+        Sets.newHashSet(UpdatePeriod.MONTHLY, UpdatePeriod.DAILY)));
+    query2 = getExpectedQuery("testcube", "select testcube.cityid as alias0, 
sum(testcube.segmsr1) as alias1 from ",
+      null, "group by testcube.cityid",
+      getWhereForUpdatePeriods("testcube", "c1_b1fact1",
+        getDateWithOffset(UpdatePeriod.DAILY, -11), NOW,
+        Sets.newHashSet(UpdatePeriod.MONTHLY, UpdatePeriod.DAILY)));
+    query3 = getExpectedQuery("testcube", "select testcube.cityid as alias0, 
sum(testcube.segmsr1) as alias1 from ",
+      null, "group by testcube.cityid",
+      getWhereForUpdatePeriods("testcube", "c0_b2fact1",
+        addDays(getDateWithOffset(UpdatePeriod.MONTHLY, -1), -1), NOW,
+        Sets.newHashSet(UpdatePeriod.MONTHLY, UpdatePeriod.DAILY)));
+    query4 = getExpectedQuery("testcube", "select testcube.cityid as alias0, 
sum(testcube.segmsr1) as alias1 from ",
+      null, "group by testcube.cityid",
+      getWhereForUpdatePeriods("testcube", "c0_b1b2fact1",
+        addDays(getDateWithOffset(UpdatePeriod.MONTHLY, -2), -1),
+        addDays(getDateWithOffset(UpdatePeriod.MONTHLY, -1), 0),
+        Sets.newHashSet(UpdatePeriod.MONTHLY, UpdatePeriod.DAILY)));
+    compareUnionQuery(ctx, "select testcube.alias0 as cityid, 
sum(testcube.alias1) as segmsr1 from (",
+      ") AS testcube GROUP BY (testcube.alias0)", newArrayList(query1, query2, 
query3, query4));
+  }
+
+  @Test
+  public void testFactJoinSegmentWithInnerUnion() throws Exception {
+    CubeQueryContext ctx = rewriteCtx("select cityid, msr2, segmsr1 from 
testcube where " + TWO_DAYS_RANGE,
+      getConf());
+    String query1, query2, query3;
+    query1 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    query2 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    query3 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, sum(testcube.msr2) as alias1, 0.0 as 
alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForHourly2days("testcube", "c1_testfact2"));
+    compareUnionQuery(ctx,
+      "select testcube.alias0 as cityid, sum(testcube.alias1) as msr2, 
sum(testcube.alias2) as segmsr1 from ( ",
+      ") as testcube group by testcube.alias0", newArrayList(query1, query2, 
query3));
+  }
+
+  @Test
+  public void testFieldWithDifferentDescriptions() throws LensException {
+    NoCandidateFactAvailableException e = getLensExceptionInRewrite("select 
invmsr1 from testcube where "
+      + TWO_DAYS_RANGE, getConf());
+    assertEquals(e.getJsonMessage().getBrief(), "Columns [invmsr1] are not 
present in any table");
+  }
+
+  @Test
+  public void testExpressions() throws Exception {
+    CubeQueryContext ctx = rewriteCtx("select singlecolchainfield, segmsr1 
from testcube where " + TWO_DAYS_RANGE,
+      getConf());
+    String joinExpr = " JOIN " + getDbName()
+      + "c1_citytable cubecity ON testcube.cityid = cubecity.id AND 
(cubecity.dt = 'latest')";
+    String query1, query2;
+    query1 = getExpectedQuery("testcube",
+      "SELECT (cubecity.name) AS `alias0`, sum((testcube.segmsr1)) AS `alias1` 
from",
+      joinExpr,
+      null, "group by cubecity.name", null, 
getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    query2 = getExpectedQuery("testcube",
+      "SELECT (cubecity.name) AS `alias0`, sum((testcube.segmsr1)) AS `alias1` 
from",
+      joinExpr,
+      null, "group by cubecity.name", null, 
getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    compareUnionQuery(ctx,
+      "SELECT (testcube.alias0) AS `singlecolchainfield`, 
sum((testcube.alias1)) AS `segmsr1` from (",
+      "as testcube group by testcube.alias0", newArrayList(query1, query2));
+  }
+
+  @Test
+  public void testQueryWithWhereHavingGroupby() throws Exception {
+    String userQuery = "select cityid, msr2, segmsr1 from testcube where 
cityname='blah' and "
+      + TWO_DAYS_RANGE + " group by cityid having segmsr1 > 1 and msr2 > 2";
+    CubeQueryContext ctx = rewriteCtx(userQuery, getConf());
+    String join1, join2, join3;
+    String query1, query2, query3;
+    join1 = "join " + getDbName()
+      + "c1_citytable cubecity1 ON testcube.cityid1 = cubecity1.id AND 
(cubecity1.dt = 'latest')";
+    join2 = "join " + getDbName()
+      + "c1_citytable cubecity2 ON testcube.cityid2 = cubecity2.id AND 
(cubecity2.dt = 'latest')";
+    join3 = "join " + getDbName()
+      + "c1_citytable cubecity ON testcube.cityid = cubecity.id AND 
(cubecity.dt = 'latest')";
+    query1 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", join1,
+      "cubecity1.name='blah'", "group by testcube.cityid", null,
+      getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    query2 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", join2,
+      "cubecity2.name='blah'", "group by testcube.cityid", null,
+      getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    query3 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, sum(testcube.msr2) as alias1, 0.0 as 
alias2 FROM ", join3,
+      "cubecity.name='blah'", "group by testcube.cityid", null,
+      getWhereForHourly2days("testcube", "c1_testfact2"));
+    compareUnionQuery(ctx,
+      "select testcube.alias0 as cityid, sum(testcube.alias1) as msr2, 
sum(testcube.alias2) as segmsr1 from ( ",
+      ") as testcube group by testcube.alias0 having ((sum((testcube.alias2)) 
> 1) and (sum((testcube.alias1)) > 2)",
+      newArrayList(query1, query2, query3));
+  }
+
+  @Test
+  public void testQueryWithManyToMany() throws LensException {
+    String userQuery = "select usersports.name, xusersports.name, 
yusersports.name, segmsr1, msr2 from testcube where "
+      + TWO_DAYS_RANGE;
+    CubeQueryContext ctx = rewriteCtx(userQuery, getConf());
+    String query1, query2, query3;
+    String joinExpr = " join " + getDbName() + "c1_usertable userdim_1 on 
testcube.userid = userdim_1.id "
+      + " join  (select user_interests_1.user_id as user_id, 
collect_set(usersports.name) as balias0 from "
+      + getDbName() + "c1_user_interests_tbl user_interests_1 join " + 
getDbName() + "c1_sports_tbl usersports on "
+      + "user_interests_1.sport_id = usersports.id group by 
user_interests_1.user_id) "
+      + "usersports on userdim_1.id = usersports.user_id"
+      + " join " + getDbName() + "c1_usertable userdim_0 on testcube.yuserid = 
userdim_0.id "
+      + " join  (select user_interests_0.user_id as 
user_id,collect_set(yusersports.name) as balias0 from "
+      + getDbName() + "c1_user_interests_tbl user_interests_0 join " + 
getDbName() + "c1_sports_tbl yusersports on "
+      + " user_interests_0.sport_id = yusersports.id group by 
user_interests_0.user_id) yusersports on userdim_0.id ="
+      + " yusersports.user_id join " + getDbName() + "c1_usertable userdim on 
testcube.xuserid = userdim.id"
+      + " join  (select user_interests.user_id as 
user_id,collect_set(xusersports.name) as balias0 from "
+      + getDbName() + "c1_user_interests_tbl user_interests join " + 
getDbName() + "c1_sports_tbl xusersports"
+      + " on user_interests.sport_id = xusersports.id group by 
user_interests.user_id) xusersports on userdim.id = "
+      + " xusersports.user_id";
+    query1 = getExpectedQuery("testcube",
+      "select (usersports.balias0) AS `alias0`, (xusersports.balias0) AS 
`alias1`, (yusersports.balias0) AS `alias2`, "
+        + "sum((testcube.segmsr1)) AS `alias3`, 0.0 AS `alias4` FROM ", 
joinExpr, null,
+      "group by (usersports.balias0), (xusersports.balias0), 
(yusersports.balias0), ", null,
+      getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    query2 = getExpectedQuery("testcube",
+      "select (usersports.balias0) AS `alias0`, (xusersports.balias0) AS 
`alias1`, (yusersports.balias0) AS `alias2`, "
+        + "sum((testcube.segmsr1)) AS `alias3`, 0.0 AS `alias4` FROM ", 
joinExpr, null,
+      "group by (usersports.balias0), (xusersports.balias0), 
(yusersports.balias0)", null,
+      getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    query3 = getExpectedQuery("testcube",
+      "select (usersports.balias0) AS `alias0`, (xusersports.balias0) AS 
`alias1`, (yusersports.balias0) AS `alias2`, "
+        + "0.0 AS `alias3`, sum(testcube.msr2) AS `alias4` FROM ", joinExpr, 
null,
+      "group by (usersports.balias0), (xusersports.balias0), 
(yusersports.balias0)", null,
+      getWhereForHourly2days("testcube", "c1_testfact2"));
+    compareUnionQuery(ctx,
+      "select testcube.alias0 AS `name`,testcube.alias1 AS `name`, 
testcube.alias2 AS `name`, "
+        + "sum((testcube.alias3)) AS `segmsr1`, sum((testcube.alias4)) AS 
`msr2` from ( ",
+      ") as testcube group by testcube.alias0, testcube.alias1, 
testcube.alias2",
+      newArrayList(query1, query2, query3));
+  }
+
+  @Test
+  public void testQueryWithHavingOnInnerMeasure() throws LensException {
+    String userQuery = "select cityid from testcube where " + TWO_DAYS_RANGE + 
" having msr2 > 2 and segmsr1 > 1";
+    CubeQueryContext ctx = rewriteCtx(userQuery, getConf());
+    String query1, query2, query3;
+    query1 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c1_b1fact1"));
+    query2 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, 0.0 as alias1, sum(testcube.segmsr1) 
as alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForDailyAndHourly2days("testcube", "c0_b2fact1"));
+    query3 = getExpectedQuery("testcube",
+      "select testcube.cityid as alias0, sum(testcube.msr2) as alias1, 0.0 as 
alias2 FROM ", null,
+      "group by testcube.cityid",
+      getWhereForHourly2days("testcube", "c1_testfact2"));
+    compareUnionQuery(ctx,
+      "select testcube.alias0 as cityid from ( ",
+      ") as testcube group by testcube.alias0 having sum(testcube.alias1) > 2 
and sum(testcube.alias2) > 1",
+      newArrayList(query1, query2, query3));
+  }
+
+  @Test
+  public void testSegmentationWithSingleSegment() throws LensException {
+    String userQuery = "select segmsr1 from basecube where " + TWO_DAYS_RANGE;
+    String actual = rewrite(userQuery, getConf());
+    String expected = getExpectedQuery("basecube",
+      "select sum(basecube.segmsr1) FROM ", null,
+      null,
+      getWhereForDailyAndHourly2days("basecube", "c1_b1fact1"));
+    compareQueries(actual, expected);
+  }
+  @Test
+  public void testSegmentationPruningWithPruneCause() throws LensException {
+    String userQuery = "select segsegmsr1 from testcube where " + 
TWO_DAYS_RANGE;
+    PruneCauses<Candidate> pruneCauses = getBriefAndDetailedError(userQuery, 
getConf());
+    Assert.assertEquals(pruneCauses.getMaxCause(), SEGMENTATION_PRUNED);
+    Map<String, String> innerCauses = 
pruneCauses.getCompact().get("SEG[b1cube; b2cube]")
+      .iterator().next().getInnerCauses();
+    Assert.assertEquals(innerCauses.size(), 2);
+    Assert.assertTrue(innerCauses.get("b1cube").equals("Columns [segsegmsr1] 
are not present in any table"));
+    Assert.assertTrue(innerCauses.get("b2cube").equals("Columns [segsegmsr1] 
are not present in any table"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
index d97c0e6..ffd0dec 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestDenormalizationResolver.java
@@ -203,8 +203,8 @@ public class TestDenormalizationResolver extends 
TestQueryRewrite {
     expected.put(newHashSet("c2_summary2", "c2_summary3", "c1_testfact2_raw", 
""
         + "c3_testfact2_raw", "c1_summary3", "c1_summary2"),
       newArrayList(new 
CandidateTablePruneCause(CandidateTablePruneCode.INVALID_DENORM_TABLE)));
-    expected.put(newHashSet("c0_testfact_continuous"), 
newArrayList(columnNotFound(
-      "msr2", "msr3")));
+    expected.put(newHashSet("c0_b1b2fact1", "c0_testfact_continuous", 
"SEG[b1cube; b2cube]"),
+      newArrayList(columnNotFound("msr2", "msr3")));
     expected.put(newHashSet("c2_summary2", "c2_summary3", "c2_summary4", 
"c4_testfact", "c2_summary1",
       "c3_testfact", "c3_testfact2_raw", "c6_testfact", "c4_testfact2", 
"c5_testfact", "c99_cheapfact",
       "c2_testfact", "c0_cheapfact", "c2_testfactmonthly", "c0_testfact"),
@@ -269,10 +269,9 @@ public class TestDenormalizationResolver extends 
TestQueryRewrite {
     Configuration tConf = new Configuration(conf);
     tConf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "");
     //test_time_dim2 and dim2 are not querable together
-    NoCandidateFactAvailableException e = 
(NoCandidateFactAvailableException)getLensExceptionInRewrite(
+    // not checking error codes or prune causes. Just verifying not answerable
+    NoCandidateFactAvailableException e = getLensExceptionInRewrite(
       "select dim2, test_time_dim2 from testcube where " + TWO_DAYS_RANGE, 
tConf);
-    Assert.assertEquals(e.getJsonMessage().getBrief(),
-      "Range not answerable"); // getting storage update periods are not valid 
for given time range
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
index 1db3712..8c5dadb 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestExpressionResolver.java
@@ -329,7 +329,7 @@ public class TestExpressionResolver extends 
TestQueryRewrite {
       getLensExceptionInRewrite("select cityStateName, msr2expr, msr5, msr15 
from testCube where "
         + TWO_DAYS_RANGE, conf);
     Assert.assertEquals(th.getErrorCode(),
-      
LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo().getErrorCode());
+      
LensCubeErrorCode.NO_JOIN_CANDIDATE_AVAILABLE.getLensErrorInfo().getErrorCode());
   }
   @Test
   public void testMaterializedExpressionPickingMaterializedValue() throws 
Exception {
