Addressed final reviews and fixed checkstyle error

Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/cb5e2a77
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/cb5e2a77
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/cb5e2a77

Branch: refs/heads/lens-1381
Commit: cb5e2a77dfd09c6736c9b295f68addba6bf27e6e
Parents: 72c20ff
Author: sushilmohanty <[email protected]>
Authored: Wed Apr 12 18:10:36 2017 +0530
Committer: sushilmohanty <[email protected]>
Committed: Wed Apr 12 18:10:36 2017 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/lens-errors.conf    |  13 +
 .../lens/cube/error/LensCubeErrorCode.java      |   2 +
 .../NoCandidateFactAvailableException.java      |  13 +-
 .../lens/cube/metadata/CubeMetastoreClient.java |   5 +-
 .../org/apache/lens/cube/metadata/DateUtil.java |   3 +-
 .../lens/cube/metadata/FactPartition.java       |   7 +
 .../lens/cube/metadata/MetastoreUtil.java       |   2 +-
 .../apache/lens/cube/metadata/TimeRange.java    |   4 +
 .../cube/parse/AbridgedTimeRangeWriter.java     |   3 -
 .../parse/CandidateCoveringSetsResolver.java    |  67 ++-
 .../apache/lens/cube/parse/CandidateDim.java    |   6 +-
 .../apache/lens/cube/parse/CandidateTable.java  |   2 +-
 .../cube/parse/CandidateTablePruneCause.java    |  41 +-
 .../apache/lens/cube/parse/CandidateUtil.java   |  15 +-
 .../lens/cube/parse/CubeQueryContext.java       |  83 ++--
 .../lens/cube/parse/CubeQueryRewriter.java      |   3 +-
 .../lens/cube/parse/CubeSemanticAnalyzer.java   |   3 +-
 .../cube/parse/DenormalizationResolver.java     |  22 +-
 .../lens/cube/parse/ExpressionResolver.java     |   3 +-
 .../apache/lens/cube/parse/JoinCandidate.java   |   2 +-
 .../cube/parse/MaxCoveringFactResolver.java     |   1 -
 .../org/apache/lens/cube/parse/PruneCauses.java |   3 +-
 .../lens/cube/parse/StorageCandidate.java       |  36 +-
 .../lens/cube/parse/StorageTableResolver.java   |  20 +-
 .../apache/lens/cube/parse/UnionCandidate.java  |  24 +-
 .../lens/cube/parse/UnionQueryWriter.java       |  22 +-
 .../apache/lens/driver/cube/RewriterPlan.java   |   6 +-
 .../apache/lens/cube/parse/CubeTestSetup.java   |  25 +-
 .../lens/cube/parse/TestAggregateResolver.java  |  42 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    | 226 ++++-----
 .../cube/parse/TestBetweenTimeRangeWriter.java  |   6 +-
 .../lens/cube/parse/TestBridgeTableQueries.java |  40 +-
 .../lens/cube/parse/TestCubeRewriter.java       |  11 +-
 .../cube/parse/TestDenormalizationResolver.java |  10 +-
 .../lens/cube/parse/TestTimeRangeResolver.java  |  36 +-
 .../cube/parse/TestUnionAndJoinCandidates.java  |  24 +-
 .../lens/cube/parse/TestUnionQueries.java       | 464 +++++++------------
 .../resources/schema/cubes/base/basecube.xml    |  20 +
 .../resources/schema/cubes/base/testcube.xml    |  23 +
 .../resources/schema/cubes/derived/der1.xml     |  20 +
 .../resources/schema/cubes/derived/der2.xml     |  20 +
 .../resources/schema/cubes/derived/der3.xml     |  20 +
 .../schema/cubes/derived/derivedcube.xml        |  20 +
 .../cubes/derived/union_join_ctx_der1.xml       |  20 +
 .../resources/schema/dimensions/citydim.xml     |  20 +
 .../resources/schema/dimensions/countrydim.xml  |  20 +
 .../resources/schema/dimensions/cycledim1.xml   |  20 +
 .../resources/schema/dimensions/cycledim2.xml   |  20 +
 .../test/resources/schema/dimensions/daydim.xml |  20 +
 .../resources/schema/dimensions/hourdim.xml     |  20 +
 .../test/resources/schema/dimensions/sports.xml |  20 +
 .../resources/schema/dimensions/statedim.xml    |  20 +
 .../resources/schema/dimensions/testdim2.xml    |  20 +
 .../resources/schema/dimensions/testdim3.xml    |  20 +
 .../resources/schema/dimensions/testdim4.xml    |  20 +
 .../schema/dimensions/unreachabledim.xml        |  20 +
 .../schema/dimensions/user_interests.xml        |  20 +
 .../resources/schema/dimensions/userdim.xml     |  20 +
 .../test/resources/schema/dimensions/zipdim.xml |  20 +
 .../resources/schema/dimtables/citytable.xml    |  37 ++
 .../resources/schema/dimtables/citytable2.xml   |  20 +
 .../resources/schema/dimtables/citytable3.xml   |  20 +
 .../resources/schema/dimtables/citytable4.xml   |  20 +
 .../resources/schema/dimtables/countrytable.xml |  20 +
 .../dimtables/countrytable_partitioned.xml      |  20 +
 .../resources/schema/dimtables/cycledim1tbl.xml |  20 +
 .../resources/schema/dimtables/cycledim2tbl.xml |  20 +
 .../resources/schema/dimtables/daydimtbl.xml    |  20 +
 .../resources/schema/dimtables/hourdimtbl.xml   |  20 +
 .../resources/schema/dimtables/sports_tbl.xml   |  20 +
 .../resources/schema/dimtables/statetable.xml   |  38 ++
 .../schema/dimtables/statetable_partitioned.xml |  20 +
 .../resources/schema/dimtables/testdim2tbl.xml  |  20 +
 .../resources/schema/dimtables/testdim2tbl2.xml |  20 +
 .../resources/schema/dimtables/testdim2tbl3.xml |  20 +
 .../resources/schema/dimtables/testdim3tbl.xml  |  20 +
 .../resources/schema/dimtables/testdim4tbl.xml  |  20 +
 .../schema/dimtables/unreachabledimtable.xml    |  20 +
 .../schema/dimtables/user_interests_tbl.xml     |  20 +
 .../resources/schema/dimtables/usertable.xml    |  20 +
 .../resources/schema/dimtables/ziptable.xml     |  20 +
 .../test/resources/schema/facts/cheapfact.xml   |  20 +
 .../test/resources/schema/facts/summary1.xml    |  20 +
 .../test/resources/schema/facts/summary2.xml    |  20 +
 .../test/resources/schema/facts/summary3.xml    |  20 +
 .../test/resources/schema/facts/summary4.xml    |  20 +
 .../test/resources/schema/facts/testfact.xml    |  20 +
 .../resources/schema/facts/testfact1_base.xml   |  20 +
 .../schema/facts/testfact1_raw_base.xml         |  20 +
 .../test/resources/schema/facts/testfact2.xml   |  20 +
 .../resources/schema/facts/testfact2_base.xml   |  20 +
 .../resources/schema/facts/testfact2_raw.xml    |  20 +
 .../schema/facts/testfact2_raw_base.xml         |  20 +
 .../resources/schema/facts/testfact3_base.xml   |  20 +
 .../schema/facts/testfact3_raw_base.xml         |  20 +
 .../schema/facts/testfact4_raw_base.xml         |  20 +
 .../resources/schema/facts/testfact5_base.xml   |  20 +
 .../schema/facts/testfact5_raw_base.xml         |  20 +
 .../resources/schema/facts/testfact6_base.xml   |  20 +
 .../schema/facts/testfact_continuous.xml        |  20 +
 .../schema/facts/testfact_deprecated.xml        |  20 +
 .../resources/schema/facts/testfactmonthly.xml  |  20 +
 .../schema/facts/union_join_ctx_fact1.xml       |  20 +
 .../schema/facts/union_join_ctx_fact2.xml       |  20 +
 .../schema/facts/union_join_ctx_fact3.xml       |  20 +
 .../schema/facts/union_join_ctx_fact5.xml       |  20 +
 .../schema/facts/union_join_ctx_fact6.xml       |  20 +
 .../src/test/resources/schema/storages/c0.xml   |  20 +
 .../src/test/resources/schema/storages/c1.xml   |  20 +
 .../src/test/resources/schema/storages/c2.xml   |  20 +
 .../src/test/resources/schema/storages/c3.xml   |  20 +
 .../src/test/resources/schema/storages/c4.xml   |  20 +
 .../src/test/resources/schema/storages/c5.xml   |  20 +
 .../src/test/resources/schema/storages/c99.xml  |  20 +
 .../lens/driver/jdbc/ColumnarSQLRewriter.java   |   2 +-
 .../lens/driver/jdbc/DruidSQLRewriter.java      |   2 +-
 .../server/metastore/MetastoreResource.java     |   2 +-
 117 files changed, 2167 insertions(+), 712 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-api/src/main/resources/lens-errors.conf
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/lens-errors.conf 
b/lens-api/src/main/resources/lens-errors.conf
index 29e24cf..94505ef 100644
--- a/lens-api/src/main/resources/lens-errors.conf
+++ b/lens-api/src/main/resources/lens-errors.conf
@@ -326,6 +326,19 @@ lensCubeErrorsForQuery = [
     errorMsg = "Could not find queried table or chain: %s"
   }
 
+  {
+    errorCode = 3034
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "%s does not have any facts that can cover the requested time 
range : %s and  queried measure set : %s"
+  }
+
+  {
+    errorCode = 3035
+    httpStatusCode = ${BAD_REQUEST}
+    errorMsg = "%s does not have any facts that can cover the queried measure 
set : %s"
+  }
+
+
 ]
 
 lensCubeErrorsForMetastore = [

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java 
b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
index 571b481..d98c4c5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/error/LensCubeErrorCode.java
@@ -57,6 +57,8 @@ public enum LensCubeErrorCode {
   STORAGE_UNION_DISABLED(3031, 1500),
   COULD_NOT_PARSE_EXPRESSION(3032, 1500),
   QUERIED_TABLE_NOT_FOUND(3033, 0),
+  NO_UNION_CANDIDATE_AVAILABLE(3034, 1501),
+  NO_JOIN_CANDIDATE_AVAILABLE(3035, 1502),
   // Error codes greater than 3100 are errors while doing a metastore 
operation.
   ERROR_IN_ENTITY_DEFINITION(3101, 100),
   TIMELINE_ABSENT(3102, 100),

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
index 6f08d0f..21dda16 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/error/NoCandidateFactAvailableException.java
@@ -28,26 +28,19 @@ import lombok.Getter;
 
 /**
  * Note: This class is mainly meant for test cases to assert the detailed 
reasons (stored in
- * {@link #briefAndDetailedError} and {@link #cubeQueryContext}) leading to 
"No Candidate was found"
+ * {@link #briefAndDetailedError}  leading to "No Candidate was found"
  */
 public class NoCandidateFactAvailableException extends LensException {
 
   @Getter
-  private final CubeQueryContext cubeQueryContext;
-  @Getter
   private final PruneCauses<StorageCandidate> briefAndDetailedError;
 
   public NoCandidateFactAvailableException(CubeQueryContext cubeql) {
-    this(cubeql.getStoragePruningMsgs().getBriefCause(), cubeql);
-  }
-
-  public NoCandidateFactAvailableException(String errMsg, CubeQueryContext 
cubeql) {
-    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(), 
errMsg);
-    this.cubeQueryContext = cubeql;
+    super(LensCubeErrorCode.NO_CANDIDATE_FACT_AVAILABLE.getLensErrorInfo(),
+        cubeql.getStoragePruningMsgs().getBriefCause());
     this.briefAndDetailedError = cubeql.getStoragePruningMsgs();
   }
 
-
   public PruneCauses.BriefAndDetailedError getJsonMessage() {
     return briefAndDetailedError.toJsonObject();
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index b445447..b5c4c89 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -422,7 +422,7 @@ public class CubeMetastoreClient {
     public TreeMap<UpdatePeriod, 
CaseInsensitiveStringHashMap<PartitionTimeline>> get(String fact, String 
storage)
       throws HiveException, LensException {
       // SUSPEND CHECKSTYLE CHECK DoubleCheckedLockingCheck
-      // Unique key for the timeline cache, based on storageName and fact.
+      // Unique key for the timeline cache, based on storage and fact.
       String timeLineKey = (Storage.getPrefix(storage)+ fact).toLowerCase();
       synchronized (this) {
         if (get(timeLineKey) == null) {
@@ -1756,7 +1756,8 @@ public class CubeMetastoreClient {
         for (Map.Entry entry : updatePeriodToTableMap.entrySet()) {
           XUpdatePeriodTableDescriptor updatePeriodTableDescriptor = new 
XUpdatePeriodTableDescriptor();
           
updatePeriodTableDescriptor.setTableDesc(getStorageTableDescFromHiveTable(
-            
this.getHiveTable(MetastoreUtil.getFactOrDimtableStorageTableName(cft.getName(),
 (String) entry.getValue()))));
+            
this.getHiveTable(MetastoreUtil.getFactOrDimtableStorageTableName(cft.getName(),
+                (String) entry.getValue()))));
           
updatePeriodTableDescriptor.setUpdatePeriod(XUpdatePeriod.valueOf(((UpdatePeriod)entry.getKey()).name()));
           
xUpdatePeriods.getUpdatePeriodTableDescriptor().add(updatePeriodTableDescriptor);
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
index d10d72e..99ad233 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/DateUtil.java
@@ -324,7 +324,8 @@ public final class DateUtil {
     }
   }
 
-  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, 
long millisInInterval, UpdatePeriod interval) {
+  private static CoveringInfo getMilliSecondCoveringInfo(Date from, Date to, 
long millisInInterval,
+      UpdatePeriod interval) {
     long diff = to.getTime() - from.getTime();
     return new CoveringInfo((int) (diff / millisInInterval),
       Stream.of(from, to).allMatch(a->interval.truncate(a).equals(a)));

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index b90b569..ed940cc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -64,6 +64,13 @@ public class FactPartition implements 
Comparable<FactPartition> {
       this.storageTables.addAll(storageTables);
     }
   }
+
+  /**
+   *  Partition should not be used as indicative of the class itself.
+   *  New Fact partition created includes more final partitions with that 
creation.
+   * @return
+   */
+
   public FactPartition withoutContaining() {
     return new FactPartition(this.getPartCol(), this.getPartSpec(), 
this.getPeriod(), null, this
       .getPartFormat(), this.getStorageTables());

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
index 1e8621d..599027f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreUtil.java
@@ -591,7 +591,7 @@ public class MetastoreUtil {
     return copy;
   }
 
-  public static String getUpdatePeriodStoragePrefixKey(String factTableName , 
String storageName, String updatePeriod) {
+  public static String getUpdatePeriodStoragePrefixKey(String factTableName, 
String storageName, String updatePeriod) {
     return MetastoreUtil.getFactKeyPrefix(factTableName) + "." + storageName + 
"." + updatePeriod;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
index 242d3ba..8286894 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimeRange.java
@@ -67,6 +67,10 @@ public class TimeRange {
     return timeRange;
   }
 
+  public long milliseconds() {
+    return toDate.getTime() - fromDate.getTime();
+  }
+
   public static class TimeRangeBuilder {
     private final TimeRange range;
 

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
index 3916a48..10f98a1 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/AbridgedTimeRangeWriter.java
@@ -19,12 +19,9 @@
 
 package org.apache.lens.cube.parse;
 
-import static com.google.common.collect.Sets.newHashSet;
-import static java.util.Optional.ofNullable;
 import static java.util.stream.Collectors.toMap;
 
 import java.util.*;
-import java.util.stream.Collectors;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.server.api.error.LensException;

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
index 0aafda6..b22d972 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateCoveringSetsResolver.java
@@ -22,7 +22,7 @@ import static 
org.apache.lens.cube.parse.CandidateUtil.getColumns;
 
 import java.util.*;
 
-import org.apache.lens.cube.error.NoCandidateFactAvailableException;
+import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
@@ -39,10 +39,9 @@ public class CandidateCoveringSetsResolver implements 
ContextRewriter {
     }
 
     if (cubeql.getCandidates().size() == 0){
-      throw new NoCandidateFactAvailableException(cubeql);
+      cubeql.throwNoCandidateFactException();
     }
 
-
     List<QueriedPhraseContext> qpcList = cubeql.getQueriedPhrases();
     Set<QueriedPhraseContext> queriedMsrs = new HashSet<>();
     for (QueriedPhraseContext qpc : qpcList) {
@@ -53,23 +52,19 @@ public class CandidateCoveringSetsResolver implements 
ContextRewriter {
 
     List<Candidate> timeRangeCoveringSet = 
resolveTimeRangeCoveringFactSet(cubeql, queriedMsrs, qpcList);
     if (timeRangeCoveringSet.isEmpty()) {
-      throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
-        + " does not have any facts that can cover the requested time range " 
+ cubeql.getTimeRanges().toString()
-        + " and queried measure set " + getColumns(queriedMsrs).toString(),
-        cubeql);
+      throw new 
LensException(LensCubeErrorCode.NO_UNION_CANDIDATE_AVAILABLE.getLensErrorInfo(),
+          cubeql.getCube().getName(), cubeql.getTimeRanges().toString(), 
getColumns(queriedMsrs).toString());
     }
     log.info("Time covering candidates :{}", timeRangeCoveringSet);
 
     if (queriedMsrs.isEmpty()) {
       cubeql.getCandidates().clear();
       cubeql.getCandidates().addAll(timeRangeCoveringSet);
-    } else {
+    } else if (!timeRangeCoveringSet.isEmpty()) {
       List<List<Candidate>> measureCoveringSets = 
resolveJoinCandidates(timeRangeCoveringSet, queriedMsrs, cubeql);
       if (measureCoveringSets.isEmpty()) {
-        throw new NoCandidateFactAvailableException(cubeql.getCube().getName()
-          + " does not have any facts that can cover the queried measure set "
-          + getColumns(queriedMsrs).toString(),
-          cubeql);
+        throw new 
LensException(LensCubeErrorCode.NO_JOIN_CANDIDATE_AVAILABLE.getLensErrorInfo(),
+            cubeql.getCube().getName(), getColumns(queriedMsrs).toString());
       }
       updateFinalCandidates(measureCoveringSets, cubeql);
     }
@@ -123,14 +118,15 @@ public class CandidateCoveringSetsResolver implements 
ContextRewriter {
 
   private List<Candidate> resolveTimeRangeCoveringFactSet(CubeQueryContext 
cubeql,
       Set<QueriedPhraseContext> queriedMsrs, List<QueriedPhraseContext> 
qpcList) throws LensException {
-    // All Candidates
-    List<Candidate> allCandidates = new ArrayList<>(cubeql.getCandidates());
-    // Partially valid candidates
-    List<Candidate> allCandidatesPartiallyValid = new ArrayList<>();
     List<Candidate> candidateSet = new ArrayList<>();
-    for (Candidate cand : allCandidates) {
-      // Assuming initial list of candidates populated are StorageCandidate
-      if (cand instanceof StorageCandidate) {
+    if (!cubeql.getCandidates().isEmpty()) {
+      // All Candidates
+      List<Candidate> allCandidates = new ArrayList<>(cubeql.getCandidates());
+      // Partially valid candidates
+      List<Candidate> allCandidatesPartiallyValid = new ArrayList<>();
+      for (Candidate cand : allCandidates) {
+        // Assuming initial list of candidates populated are StorageCandidate
+        assert (cand instanceof StorageCandidate);
         StorageCandidate sc = (StorageCandidate) cand;
         if (CandidateUtil.isValidForTimeRanges(sc, cubeql.getTimeRanges())) {
           candidateSet.add(CandidateUtil.cloneStorageCandidate(sc));
@@ -138,26 +134,25 @@ public class CandidateCoveringSetsResolver implements 
ContextRewriter {
           
allCandidatesPartiallyValid.add(CandidateUtil.cloneStorageCandidate(sc));
         } else {
           cubeql.addCandidatePruningMsg(sc, 
CandidateTablePruneCause.storageNotAvailableInRange(
-            cubeql.getTimeRanges()));
+              cubeql.getTimeRanges()));
         }
-      } else {
-        throw new LensException("Not a StorageCandidate!!");
+
       }
+      // Get all covering fact sets
+      List<UnionCandidate> unionCoveringSet =
+          getCombinations(new ArrayList<>(allCandidatesPartiallyValid), 
cubeql);
+      // Sort the Collection based on no of elements
+      unionCoveringSet.sort(new 
CandidateUtil.ChildrenSizeBasedCandidateComparator<UnionCandidate>());
+      // prune non covering sets
+      pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql);
+      // prune candidate set which doesn't contain any common measure
+      pruneUnionCoveringSetWithoutAnyCommonMeasure(unionCoveringSet, 
queriedMsrs, cubeql);
+      // prune redundant covering sets
+      pruneRedundantUnionCoveringSets(unionCoveringSet);
+      // pruning done in the previous steps, now create union candidates
+      candidateSet.addAll(unionCoveringSet);
+      updateQueriableMeasures(candidateSet, qpcList, cubeql);
     }
-    // Get all covering fact sets
-    List<UnionCandidate> unionCoveringSet =
-        getCombinations(new ArrayList<>(allCandidatesPartiallyValid), cubeql);
-    // Sort the Collection based on no of elements
-    unionCoveringSet.sort(new 
CandidateUtil.ChildrenSizeBasedCandidateComparator<UnionCandidate>());
-    // prune non covering sets
-    pruneUnionCandidatesNotCoveringAllRanges(unionCoveringSet, cubeql);
-    // prune candidate set which doesn't contain any common measure i
-    pruneUnionCoveringSetWithoutAnyCommonMeasure(unionCoveringSet, 
queriedMsrs, cubeql);
-    // prune redundant covering sets
-    pruneRedundantUnionCoveringSets(unionCoveringSet);
-    // pruing done in the previous steps, now create union candidates
-    candidateSet.addAll(unionCoveringSet);
-    updateQueriableMeasures(candidateSet, qpcList, cubeql);
     return candidateSet;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
index 0dde72d..ce734cf 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateDim.java
@@ -38,7 +38,7 @@ public class CandidateDim implements CandidateTable {
   final CubeDimensionTable dimtable;
   @Getter
   @Setter
-  private String storageName;
+  private String storageTable;
   @Getter
   @Setter
   private String whereClause;
@@ -73,11 +73,11 @@ public class CandidateDim implements CandidateTable {
       String database = SessionState.get().getCurrentDatabase();
       // Add database name prefix for non default database
       if (StringUtils.isNotBlank(database) && 
!"default".equalsIgnoreCase(database)) {
-        storageName = database + "." + storageName;
+        storageTable = database + "." + storageTable;
       }
       dbResolved = true;
     }
-    return storageName + " " + alias;
+    return storageTable + " " + alias;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
index 168dcc6..c909545 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTable.java
@@ -40,7 +40,7 @@ public interface CandidateTable {
    * Get storage table corresponding to this candidate
    * @return
    */
-  String getStorageName();
+  String getStorageTable();
 
   /**
    * Get candidate table

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 1de491c..1c0d356 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -18,16 +18,17 @@
  */
 package org.apache.lens.cube.parse;
 
-import static com.google.common.collect.Lists.newArrayList;
-import static com.google.common.collect.Lists.partition;
 import static java.util.stream.Collectors.toSet;
+
 import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode.*;
 
+import static com.google.common.collect.Lists.newArrayList;
+
 import java.util.*;
-import java.util.stream.Stream;
 
 import org.apache.lens.cube.metadata.TimeRange;
 
+
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 
 import com.google.common.collect.Lists;
@@ -47,7 +48,17 @@ public class CandidateTablePruneCause {
   public enum CandidateTablePruneCode {
     // other fact set element is removed
     ELEMENT_IN_SET_PRUNED("Other candidate from measure covering set is 
pruned"),
-
+    // least weight not satisfied
+    MORE_WEIGHT("Picked table had more weight than minimum."),
+    // partial data is enabled, another fact has more data.
+    LESS_DATA("Picked table has less data than the maximum"),
+    // cube table has more partitions
+    MORE_PARTITIONS("Picked table has more partitions than minimum"),
+    // storage is not supported by execution engine/driver
+    UNSUPPORTED_STORAGE("Unsupported Storage"),
+    // invalid cube table
+    INVALID("Invalid cube table provided in query"),
+    // expression is not evaluable in the candidate
     COLUMN_NOT_FOUND("%s are not %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         if (causes.size() == 1) {
@@ -81,9 +92,6 @@ public class CandidateTablePruneCause {
     PART_COL_DOES_NOT_EXIST("Partition column does not exist"),
     // Range is not supported by this storage table
     TIME_RANGE_NOT_ANSWERABLE("Range not answerable"),
-    // storage is not supported by execution engine/driver
-    UNSUPPORTED_STORAGE("Unsupported Storage"),
-
     STORAGE_NOT_AVAILABLE_IN_RANGE("No storages available for all of these 
time ranges: %s") {
       @Override
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
@@ -94,20 +102,11 @@ public class CandidateTablePruneCause {
       }
     },
 
-    // least weight not satisfied
-    MORE_WEIGHT("Picked table had more weight than minimum."),
-    // partial data is enabled, another fact has more data.
-    LESS_DATA("Picked table has less data than the maximum"),
-    // cube table has more partitions
-    MORE_PARTITIONS("Picked table has more partitions than minimum"),
-    // invalid cube table
-    INVALID("Invalid cube table provided in query"), //TODO move up. This does 
not make sense here.
-    // expression is not evaluable in the candidate
     EXPRESSION_NOT_EVALUABLE("%s expressions not evaluable") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         return new String[]{
           
causes.stream().map(CandidateTablePruneCause::getMissingExpressions).flatMap(Collection::stream)
-            .collect(toSet()).toString()
+            .collect(toSet()).toString(),
         };
       }
     },
@@ -154,7 +153,7 @@ public class CandidateTablePruneCause {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         return new String[]{
           
causes.stream().map(CandidateTablePruneCause::getJoinColumns).flatMap(Collection::stream)
-            .collect(toSet()).toString()
+            .collect(toSet()).toString(),
         };
       }
     },
@@ -164,7 +163,7 @@ public class CandidateTablePruneCause {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         return new String[]{
           
causes.stream().map(CandidateTablePruneCause::getColumnsMissingDefaultAggregate).flatMap(Collection::stream)
-            .collect(toSet()).toString()
+            .collect(toSet()).toString(),
         };
       }
     },
@@ -172,7 +171,7 @@ public class CandidateTablePruneCause {
     MISSING_PARTITIONS("Missing partitions for the cube table: %s") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         return new String[]{
-          
causes.stream().map(CandidateTablePruneCause::getMissingPartitions).collect(toSet()).toString()
+          
causes.stream().map(CandidateTablePruneCause::getMissingPartitions).collect(toSet()).toString(),
         };
       }
     },
@@ -181,7 +180,7 @@ public class CandidateTablePruneCause {
             + " %s. Please try again later or rerun after removing incomplete 
metrics") {
       Object[] getFormatPlaceholders(Set<CandidateTablePruneCause> causes) {
         return new String[]{
-          
causes.stream().map(CandidateTablePruneCause::getIncompletePartitions).collect(toSet()).toString()
+          
causes.stream().map(CandidateTablePruneCause::getIncompletePartitions).collect(toSet()).toString(),
         };
       }
     };

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
index 5db1344..b9ff0ef 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateUtil.java
@@ -40,7 +40,12 @@ import com.google.common.collect.TreeRangeSet;
 /**
  * Placeholder for Util methods that will be required for {@link Candidate}
  */
-public class CandidateUtil {
+public final class CandidateUtil {
+
+  private CandidateUtil() {
+    // Added due to checkstyle error getting below :
+    // (design) HideUtilityClassConstructor: Utility classes should not have a 
public or default constructor.
+  }
 
   /**
    * Returns true if the Candidate is valid for all the timeranges based on 
its start and end times.
@@ -217,10 +222,12 @@ public class CandidateUtil {
     return false;
   }
 
-  public static String getTimeRangeWhereClasue(TimeRangeWriter rangeWriter, 
StorageCandidate sc, TimeRange range) throws LensException {
-    String rangeWhere = rangeWriter.getTimeRangeWhereClause(sc.getCubeql(), 
sc.getCubeql().getAliasForTableName(sc.getCube().getName()),
+  public static String getTimeRangeWhereClasue(TimeRangeWriter rangeWriter,
+      StorageCandidate sc, TimeRange range) throws LensException {
+    String rangeWhere = rangeWriter.getTimeRangeWhereClause(sc.getCubeql(),
+        sc.getCubeql().getAliasForTableName(sc.getCube().getName()),
       sc.getRangeToPartitions().get(range));
-    if(sc.getRangeToExtraWhereFallBack().containsKey(range)){
+    if (sc.getRangeToExtraWhereFallBack().containsKey(range)) {
       rangeWhere =  "((" + rangeWhere + ") and  (" + 
sc.getRangeToExtraWhereFallBack().get(range) + "))";
     }
     return rangeWhere;

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index 193bf44..c63c4c0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -19,27 +19,18 @@
 
 package org.apache.lens.cube.parse;
 
-import static com.google.common.base.Preconditions.checkArgument;
+
 import static java.util.stream.Collectors.toSet;
+
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
+
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_REWRITE_DIM_FILTER_TO_FACT_FILTER;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.NON_EXISTING_PARTITIONS;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.REWRITE_DIM_FILTER_TO_FACT_FILTER;
+
+import static com.google.common.base.Preconditions.checkArgument;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
+import java.util.*;
 import java.util.function.Predicate;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
@@ -59,15 +50,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.JoinCond;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.ParseUtils;
-import org.apache.hadoop.hive.ql.parse.QB;
-import org.apache.hadoop.hive.ql.parse.QBJoinTree;
-import org.apache.hadoop.hive.ql.parse.QBParseInfo;
+import org.apache.hadoop.hive.ql.parse.*;
 import org.apache.hadoop.util.ReflectionUtils;
 
 import org.codehaus.jackson.map.ObjectMapper;
@@ -75,11 +58,7 @@ import org.codehaus.jackson.map.ObjectMapper;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.Getter;
-import lombok.Setter;
-import lombok.ToString;
+import lombok.*;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -756,7 +735,7 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
       }
     } else { // (joinTree.getBaseSrc()[0] != null){
       String alias = joinTree.getBaseSrc()[0].toLowerCase();
-      builder.append(getStorageStringWithAlias(candidate , dimsToQuery, 
alias));
+      builder.append(getStorageStringWithAlias(candidate, dimsToQuery, alias));
       joiningTables.add(alias);
     }
     if (joinTree.getJoinCond() != null) {
@@ -854,23 +833,37 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     if (hasCubeInQuery()) {
       if (candidates.size() > 0) {
         cand = candidates.iterator().next();
-        log.info("Available Candidates:{}, picking up Candaidate: {} for 
querying", candidates, cand);
+        log.info("Available Candidates:{}, picking up Candidate: {} for 
querying", candidates, cand);
       } else {
-        if (!storagePruningMsgs.isEmpty()) {
-          try(ByteArrayOutputStream out = new ByteArrayOutputStream()) {
-            ObjectMapper mapper = new ObjectMapper();
-            mapper.writeValue(out, storagePruningMsgs.getJsonObject());
-            log.info("No candidate found because: {}", out.toString("UTF-8"));
-          } catch (Exception e) {
-            throw new LensException("Error writing fact pruning messages", e);
+        throwNoCandidateFactException();
+      }
+    }
+    return cand;
+  }
+
+  void throwNoCandidateFactException() throws LensException {
+    String reason = "";
+    if (!storagePruningMsgs.isEmpty()) {
+      ByteArrayOutputStream out = null;
+      try {
+        ObjectMapper mapper = new ObjectMapper();
+        out = new ByteArrayOutputStream();
+        mapper.writeValue(out, storagePruningMsgs.getJsonObject());
+        reason = out.toString("UTF-8");
+      } catch (Exception e) {
+        throw new LensException("Error writing fact pruning messages", e);
+      } finally {
+        if (out != null) {
+          try {
+            out.close();
+          } catch (IOException e) {
+            throw new LensException(e);
           }
         }
-        log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, 
Cause {}",
-            storagePruningMsgs.toJsonObject());
-        throw new NoCandidateFactAvailableException(this);
       }
     }
-    return cand;
+    log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause 
{}", storagePruningMsgs.toJsonObject());
+    throw new NoCandidateFactAvailableException(this);
   }
 
   private HQLContextInterface hqlContext;
@@ -1026,8 +1019,8 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     //update dim filter with fact filter, set where string in sc
     if (scSet.size() > 0) {
       for (StorageCandidate sc : scSet) {
-        String qualifiedStorageTable = sc.getStorageName();
-        String storageTable = 
qualifiedStorageTable.substring(qualifiedStorageTable.indexOf(".") + 1); //TODO 
this looks useless
+        String qualifiedStorageTable = sc.getStorageTable();
+        String storageTable = 
qualifiedStorageTable.substring(qualifiedStorageTable.indexOf(".") + 1);
         String where = getWhere(sc, autoJoinCtx,
           sc.getQueryAst().getWhereAST(), 
getAliasForTableName(sc.getBaseTable().getName()),
           shouldReplaceDimFilterWithFactFilter(), storageTable, dimsToQuery);

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 300d134..6bee386 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -208,7 +208,7 @@ public class CubeQueryRewriter {
   public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
     CubeSemanticAnalyzer analyzer;
     try {
-      analyzer = new CubeSemanticAnalyzer(hconf);
+      analyzer = new CubeSemanticAnalyzer(conf, hconf);
       analyzer.analyze(astnode, qlCtx);
     } catch (SemanticException e) {
       throw new LensException(SYNTAX_ERROR.getLensErrorInfo(), e, 
e.getMessage());
@@ -244,6 +244,7 @@ public class CubeQueryRewriter {
        */
       MethodMetricsContext mgauge = 
MethodMetricsFactory.createMethodGauge(ctx.getConf(), true,
         rewriter.getClass().getCanonicalName() + ITER_STR + i);
+
       rewriter.rewriteContext(ctx);
       mgauge.markSuccess();
       i++;

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
index 8214f65..e2015c2 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeSemanticAnalyzer.java
@@ -22,6 +22,7 @@ package org.apache.lens.cube.parse;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.parse.*;
@@ -36,7 +37,7 @@ public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   @Getter
   private QB cubeQB;
 
-  public CubeSemanticAnalyzer(HiveConf hiveConf) throws SemanticException {
+  public CubeSemanticAnalyzer(Configuration queryConf, HiveConf hiveConf) 
throws SemanticException {
     super(new QueryState(hiveConf));
     setupRules();
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index e5cf916..30fa873 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -18,10 +18,10 @@
  */
 package org.apache.lens.cube.parse;
 
+import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.denormColumnNotFound;
+
 import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
 import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
-import static 
org.apache.hadoop.hive.ql.parse.HiveParser_SelectClauseParser.TOK_FUNCTION;
-import static 
org.apache.lens.cube.parse.CandidateTablePruneCause.denormColumnNotFound;
 
 import java.util.*;
 
@@ -109,7 +109,8 @@ public class DenormalizationResolver implements 
ContextRewriter {
             // there is no path
             // to the source table
             log.info("Adding denormalized column for column:{} for table:{}", 
col, table);
-            tableToRefCols.computeIfAbsent(table.getName(), k -> new 
HashSet<>()).add(refer);
+            String name = (table instanceof CandidateDim) ? table.getName() : 
table.getStorageTable();
+            tableToRefCols.computeIfAbsent(name, k -> new 
HashSet<>()).add(refer);
             // Add to optional tables
             for (ChainRefCol refCol : refer.col.getChainRefColumns()) {
               cubeql.addOptionalDimTable(refCol.getChainName(), table, false, 
refer.col.getName(), true,
@@ -146,7 +147,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
       if (!tableToRefCols.isEmpty()) {
         // pick referenced columns for fact
         if (sc != null) {
-          pickColumnsForTable(cubeql, sc.getName());
+          pickColumnsForTable(cubeql, sc.getStorageTable());
         }
         // pick referenced columns for dimensions
         if (dimsToQuery != null) {
@@ -178,7 +179,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
       if (!tableToRefCols.isEmpty()) {
         // pick referenced columns for fact
         if (sc != null) {
-          pickColumnsForTable(cubeql, sc.getName());
+          pickColumnsForTable(cubeql, sc.getStorageTable());
         }
         // pick referenced columns for dimensions
         if (dimsToQuery != null) {
@@ -203,7 +204,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
     }
     // checks if the reference if picked for facts and dimsToQuery passed
     private boolean isPickedFor(PickedReference picked, StorageCandidate sc, 
Map<Dimension, CandidateDim> dimsToQuery) {
-      if (sc != null && picked.pickedFor.equalsIgnoreCase(sc.getName())) {
+      if (sc != null && 
picked.pickedFor.equalsIgnoreCase(sc.getStorageTable())) {
         return true;
       }
       if (dimsToQuery != null) {
@@ -263,10 +264,11 @@ public class DenormalizationResolver implements 
ContextRewriter {
       }
     }
 
-    private void replaceReferencedColumns(CubeQueryContext cubeql, 
StorageCandidate sc, boolean replaceFact) throws LensException {
+    private void replaceReferencedColumns(CubeQueryContext cubeql, 
StorageCandidate sc, boolean replaceFact)
+        throws LensException {
       QueryAST ast = cubeql;
-      boolean factRefExists = sc != null && tableToRefCols.get(sc.getName()) 
!= null && !tableToRefCols.get(sc
-          .getName()).isEmpty();
+      boolean factRefExists = sc != null && 
tableToRefCols.get(sc.getStorageTable()) != null
+          && !tableToRefCols.get(sc.getStorageTable()).isEmpty();
       if (replaceFact && factRefExists) {
         ast = sc.getQueryAst();
       }
@@ -395,7 +397,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
         for (Iterator<StorageCandidate> i =
              
CandidateUtil.getStorageCandidates(cubeql.getCandidates()).iterator(); 
i.hasNext();) {
           StorageCandidate candidate = i.next();
-          Set<String> nonReachableFields = 
denormCtx.getNonReachableReferenceFields(candidate.getName());
+          Set<String> nonReachableFields = 
denormCtx.getNonReachableReferenceFields(candidate.getStorageTable());
           if (!nonReachableFields.isEmpty()) {
             log.info("Not considering fact table:{} as columns {} are not 
available", candidate, nonReachableFields);
             cubeql.addCandidatePruningMsg(candidate, 
denormColumnNotFound(nonReachableFields));

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 97a9ef0..926a4d0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -378,7 +378,8 @@ class ExpressionResolver implements ContextRewriter {
       return ec.isEvaluable(cTable);
     }
 
-    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, StorageCandidate 
sc, Map<Dimension, CandidateDim> dimsToQuery,
+    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, StorageCandidate sc,
+        Map<Dimension, CandidateDim> dimsToQuery,
       QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       log.info("Picking expressions for candidate {} ", sc);

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
index 6334062..52085ea 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -137,6 +137,6 @@ public class JoinCandidate implements Candidate {
   }
 
   private String getToString() {
-    return this.toStr = "JOIN[" + childCandidate1.toString() + ", " + 
childCandidate2.toString() + "]";
+    return "JOIN[" + childCandidate1.toString() + ", " + 
childCandidate2.toString() + "]";
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 0a24460..34180d1 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -95,7 +95,6 @@ class MaxCoveringFactResolver implements ContextRewriter {
         }
       }
     }
-    
//cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
   }
 
   private void resolveByDataCompleteness(CubeQueryContext cubeql) {

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 50ccab5..0996db5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -18,9 +18,10 @@
  */
 package org.apache.lens.cube.parse;
 
-import static com.google.common.collect.Sets.newHashSet;
 import static java.util.stream.Collectors.toMap;
 
+import static com.google.common.collect.Sets.newHashSet;
+
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
index 628e9aa..d95cf27 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageCandidate.java
@@ -29,7 +29,6 @@ import static 
org.apache.lens.cube.parse.StorageUtil.processExpressionsForComple
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.*;
-import java.util.stream.Collectors;
 
 import org.apache.lens.cube.metadata.AbstractCubeTable;
 import org.apache.lens.cube.metadata.CubeFactTable;
@@ -51,7 +50,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.util.ReflectionUtils;
 
 import org.antlr.runtime.CommonToken;
 
@@ -120,6 +118,8 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
   @Getter
   private String storageName;
   @Getter
+  private String storageTable;
+  @Getter
   @Setter
   private QueryAST queryAst;
   @Getter
@@ -190,8 +190,9 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     this.fact = fact;
     this.cubeql = cubeql;
     this.storageName = storageName;
+    this.storageTable = 
MetastoreUtil.getFactOrDimtableStorageTableName(fact.getName(), storageName);
     this.conf = cubeql.getConf();
-    this.name = 
MetastoreUtil.getFactOrDimtableStorageTableName(fact.getName(), storageName);
+    this.name = fact.getName();
     this.processTimePartCol = 
conf.get(CubeQueryConfUtil.PROCESS_TIME_PART_COL);
     String formatStr = 
conf.get(CubeQueryConfUtil.PART_WHERE_CLAUSE_DATE_FORMAT);
     if (formatStr != null) {
@@ -205,8 +206,8 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     if (storageTblNames.size() > 1) {
       isStorageTblsAtUpdatePeriodLevel = true;
     } else {
-      //if this.name is equal to the storage table name it implies 
isStorageTblsAtUpdatePeriodLevel is false
-      isStorageTblsAtUpdatePeriodLevel = 
!storageTblNames.iterator().next().equalsIgnoreCase(name);
+      //if this.storageTable is equal to the storage table name it implies 
isStorageTblsAtUpdatePeriodLevel is false
+      isStorageTblsAtUpdatePeriodLevel = 
!storageTblNames.iterator().next().equalsIgnoreCase(storageTable);
     }
     setStorageStartAndEndDate();
   }
@@ -417,8 +418,8 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
 
   private void updatePartitionStorage(FactPartition part) throws LensException 
{
     try {
-      if (client.factPartitionExists(fact, part, name)) {
-        part.getStorageTables().add(name);
+      if (client.factPartitionExists(fact, part, storageTable)) {
+        part.getStorageTables().add(storageTable);
         part.setFound(true);
       }
     } catch (HiveException e) {
@@ -478,7 +479,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     }
 
     if (!client.partColExists(this.getFact().getName(), storageName, partCol)) 
{
-      log.info("{} does not exist in {}", partCol, name);
+      log.info("{} does not exist in {}", partCol, storageTable);
       return false;
     }
 
@@ -488,7 +489,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     TreeSet<UpdatePeriod> remainingIntervals =  new TreeSet<>(updatePeriods);
     remainingIntervals.remove(maxInterval);
     if (!CandidateUtil.isCandidatePartiallyValidForTimeRange(
-      maxIntervalStorageTblStartDate, maxIntervalStorageTblEndDate,fromDate, 
toDate)) {
+      maxIntervalStorageTblStartDate, maxIntervalStorageTblEndDate, fromDate, 
toDate)) {
       //Check the time range in remainingIntervals as maxInterval is not useful
       return getPartitions(fromDate, toDate, partCol, partitions, 
remainingIntervals,
         addNonExistingParts, failOnPartialData, missingPartitions);
@@ -498,7 +499,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
       ? fromDate : maxIntervalStorageTblStartDate, maxInterval);
     Date floorToDate = 
DateUtil.getFloorDate(toDate.before(maxIntervalStorageTblEndDate)
       ? toDate : maxIntervalStorageTblEndDate, maxInterval);
-    if(ceilFromDate.equals(floorToDate) || floorToDate.before(ceilFromDate)) {
+    if (ceilFromDate.equals(floorToDate) || floorToDate.before(ceilFromDate)) {
       return getPartitions(fromDate, toDate, partCol, partitions, 
remainingIntervals,
         addNonExistingParts, failOnPartialData, missingPartitions);
     }
@@ -623,7 +624,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     Set<FactPartition> rangeParts = getPartitions(timeRange, 
validUpdatePeriods, true, failOnPartialData, missingParts);
     String partCol = timeRange.getPartitionColumn();
     boolean partColNotSupported = rangeParts.isEmpty();
-    String storageTableName = getName();
+    String storageTableName = getStorageTable();
 
     if (storagePruningMsgs.containsKey(this)) {
       List<CandidateTablePruneCause> causes = storagePruningMsgs.get(this);
@@ -813,13 +814,13 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
     StorageCandidate storageCandidateObj = (StorageCandidate) obj;
     //Assuming that same instance of cube and fact will be used across 
StorageCandidate s and hence relying directly
     //on == check for these.
-    return (this.cube == storageCandidateObj.cube && this.fact == 
storageCandidateObj.fact && this.name
-      .equals(storageCandidateObj.name));
+    return (this.cube == storageCandidateObj.cube && this.fact == 
storageCandidateObj.fact && this.storageTable
+      .equals(storageCandidateObj.storageTable));
   }
 
   @Override
   public int hashCode() {
-    return this.name.hashCode();
+    return this.storageTable.hashCode();
   }
 
   @Override
@@ -879,8 +880,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
   private boolean isUpdatePeriodUseful(TimeRange timeRange, UpdatePeriod 
updatePeriod) {
     try {
       if 
(!CandidateUtil.isCandidatePartiallyValidForTimeRange(getStorageTableStartDate(updatePeriod),
-        getStorageTableEndDate(updatePeriod), timeRange.getFromDate(), 
timeRange.getToDate()))
-      {
+        getStorageTableEndDate(updatePeriod), timeRange.getFromDate(), 
timeRange.getToDate())) {
         return false;
       }
       Date storageTblStartDate  = getStorageTableStartDate(updatePeriod);
@@ -983,7 +983,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
 
   public String getResolvedName() {
     if (resolvedName == null) {
-      return name;
+      return storageTable;
     }
     return resolvedName;
   }
@@ -1009,7 +1009,7 @@ public class StorageCandidate implements Candidate, 
CandidateTable {
       updatePeriodSpecificSc = new StorageCandidate(this);
       updatePeriodSpecificSc.truncatePartitions(period);
       
updatePeriodSpecificSc.setResolvedName(client.getStorageTableName(fact.getName(),
-        storageName, period));
+          storageName, period));
       periodSpecificScList.add(updatePeriodSpecificSc);
     }
     return periodSpecificScList;

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index 1a2d9a9..10c3bbe 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -134,8 +134,7 @@ class StorageTableResolver implements ContextRewriter {
       }
       if (!isTimeRangeAnswerableByThisCandidate) {
         candidateIterator.remove();
-      }
-      else if (failOnPartialData && !isComplete) {
+      } else if (failOnPartialData && !isComplete) {
         candidateIterator.remove();
         log.info("Not considering candidate:{} as its data is not is not 
complete", candidate);
         Set<StorageCandidate> scSet = 
CandidateUtil.getStorageCandidates(candidate);
@@ -228,8 +227,8 @@ class StorageTableResolver implements ContextRewriter {
           continue;
         }
         // pick the first storage table
-        candidate.setStorageName(storageTables.iterator().next());
-        candidate.setWhereClause(whereClauses.get(candidate.getStorageName()));
+        candidate.setStorageTable(storageTables.iterator().next());
+        
candidate.setWhereClause(whereClauses.get(candidate.getStorageTable()));
       }
     }
   }
@@ -262,7 +261,7 @@ class StorageTableResolver implements ContextRewriter {
       String str = 
conf.get(CubeQueryConfUtil.getValidStorageTablesKey(sc.getFact().getName()));
       List<String> validFactStorageTables =
         StringUtils.isBlank(str) ? null : 
Arrays.asList(StringUtils.split(str.toLowerCase(), ","));
-      storageTable = sc.getName();
+      storageTable = sc.getStorageTable();
       // Check if storagetable is in the list of valid storages.
       if (validFactStorageTables != null && 
!validFactStorageTables.contains(storageTable)) {
         log.info("Skipping storage table {} as it is not valid", storageTable);
@@ -287,12 +286,12 @@ class StorageTableResolver implements ContextRewriter {
         if (maxInterval != null && updatePeriod.compareTo(maxInterval) > 0) {
           // if user supplied max interval, all intervals larger than that are 
useless.
           log.info("Skipping update period {} for candidate {} since it's more 
than max interval supplied({})",
-            updatePeriod, sc.getName(), maxInterval);
+            updatePeriod, sc.getStorageTable(), maxInterval);
           skipUpdatePeriodCauses.put(updatePeriod.toString(), 
SkipUpdatePeriodCode.UPDATE_PERIOD_BIGGER_THAN_MAX);
         } else if (validUpdatePeriods != null && 
!validUpdatePeriods.contains(updatePeriod.name().toLowerCase())) {
           // if user supplied valid update periods, other update periods are 
useless
           log.info("Skipping update period {} for candidate {} for storage {} 
since it's invalid",
-            updatePeriod, sc.getName(), storageTable);
+            updatePeriod, sc.getStorageTable(), storageTable);
           skipUpdatePeriodCauses.put(updatePeriod.toString(), 
SkipUpdatePeriodCode.INVALID);
         } else if (!sc.isUpdatePeriodUseful(updatePeriod)) {
           // if the storage candidate finds this update useful to keep looking 
at the time ranges queried
@@ -328,10 +327,7 @@ class StorageTableResolver implements ContextRewriter {
             //This is the prune cause
             pruningCauseForThisTimeRange =
               new 
CandidateTablePruneCause(CandidateTablePruneCode.TIME_RANGE_NOT_ANSWERABLE);
-          }
-          //Check partition (or fallback) column existence
-          //TODO Shouldn't we check atleast once for the existence of part 
column
-          else if (cubeql.shouldReplaceTimeDimWithPart()) {
+          } else if (cubeql.shouldReplaceTimeDimWithPart()) {
             if (!client.partColExists(sc.getFact().getName(), 
sc.getStorageName(), range.getPartitionColumn())) {
               pruningCauseForThisTimeRange = 
partitionColumnsMissing(range.getPartitionColumn());
               TimeRange fallBackRange = StorageUtil.getFallbackRange(range, 
sc.getFact().getName(), cubeql);
@@ -352,7 +348,7 @@ class StorageTableResolver implements ContextRewriter {
             }
           }
 
-          if(pruningCauseForThisTimeRange != null) {
+          if (pruningCauseForThisTimeRange != null) {
             allPruningCauses.add(pruningCauseForThisTimeRange);
           }
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
index 62ebf71..7f07dbc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
@@ -24,6 +24,8 @@ import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
+import com.google.common.collect.Maps;
+
 /**
  * Represents a union of two candidates
  */
@@ -41,7 +43,7 @@ public class UnionCandidate implements Candidate {
    */
   private List<Candidate> childCandidates;
   private QueryAST queryAst;
-
+  private Map<TimeRange, Map<Candidate, TimeRange>> splitTimeRangeMap = 
Maps.newHashMap();
   public UnionCandidate(List<Candidate> childCandidates, CubeQueryContext 
cubeql) {
     this.childCandidates = childCandidates;
     //this.alias = alias;
@@ -56,7 +58,7 @@ public class UnionCandidate implements Candidate {
 
   @Override
   public boolean isTimeRangeCoverable(TimeRange timeRange) throws 
LensException {
-    Map<Candidate, TimeRange> candidateRange = 
splitTimeRangeForChildren(timeRange);
+    Map<Candidate, TimeRange> candidateRange = getTimeRangeSplit(timeRange);
     for (Map.Entry<Candidate, TimeRange> entry : candidateRange.entrySet()) {
       if (!entry.getKey().isTimeRangeCoverable(entry.getValue())) {
         return false;
@@ -106,8 +108,10 @@ public class UnionCandidate implements Candidate {
   @Override
   public double getCost() {
     double cost = 0.0;
-    for (Candidate cand : childCandidates) {
-      cost += cand.getCost();
+    for (TimeRange timeRange : cubeql.getTimeRanges()) {
+      for (Map.Entry<Candidate, TimeRange> entry : 
getTimeRangeSplit(timeRange).entrySet()) {
+        cost += entry.getKey().getCost() * entry.getValue().milliseconds() / 
timeRange.milliseconds();
+      }
     }
     return cost;
   }
@@ -137,7 +141,7 @@ public class UnionCandidate implements Candidate {
   @Override
   public boolean evaluateCompleteness(TimeRange timeRange, TimeRange 
parentTimeRange, boolean failOnPartialData)
     throws LensException {
-    Map<Candidate, TimeRange> candidateRange = 
splitTimeRangeForChildren(timeRange);
+    Map<Candidate, TimeRange> candidateRange = getTimeRangeSplit(timeRange);
     boolean ret = true;
     for (Map.Entry<Candidate, TimeRange> entry : candidateRange.entrySet()) {
       ret &= entry.getKey().evaluateCompleteness(entry.getValue(), 
parentTimeRange, failOnPartialData);
@@ -192,12 +196,7 @@ public class UnionCandidate implements Candidate {
    * @return
    */
   private Map<Candidate, TimeRange> splitTimeRangeForChildren(TimeRange 
timeRange) {
-    Collections.sort(childCandidates, new Comparator<Candidate>() {
-      @Override
-      public int compare(Candidate o1, Candidate o2) {
-        return o1.getCost() < o2.getCost() ? -1 : o1.getCost() == o2.getCost() 
? 0 : 1;
-      }
-    });
+    childCandidates.sort(Comparator.comparing(Candidate::getCost));
     Map<Candidate, TimeRange> childrenTimeRangeMap = new HashMap<>();
     // Sorted list based on the weights.
     Set<TimeRange> ranges = new HashSet<>();
@@ -212,6 +211,9 @@ public class UnionCandidate implements Candidate {
     }
     return childrenTimeRangeMap;
   }
+  private Map<Candidate, TimeRange> getTimeRangeSplit(TimeRange range) {
+    return splitTimeRangeMap.computeIfAbsent(range, 
this::splitTimeRangeForChildren);
+  }
 
   /**
    * Resolves the time range for this candidate based on overlap.

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
index f2325f1..3ee817f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
@@ -498,7 +498,8 @@ public class UnionQueryWriter {
       ASTNode child = (ASTNode) selectAST.getChild(i);
       ASTNode outerSelect = new ASTNode(child);
       ASTNode selectExprAST = (ASTNode) child.getChild(0);
-      ASTNode outerAST = getOuterAST(selectExprAST, innerSelectAST, 
aliasDecider, sc, true, cubeql.getBaseCube().getDimAttributeNames());
+      ASTNode outerAST = getOuterAST(selectExprAST, innerSelectAST, 
aliasDecider, sc, true,
+          cubeql.getBaseCube().getDimAttributeNames());
       outerSelect.addChild(outerAST);
       // has an alias? add it
       if (child.getChildCount() > 1) {
@@ -533,7 +534,8 @@ public class UnionQueryWriter {
    5. If given ast is memorized as mentioned in the above cases, return the 
mapping.
  */
   private ASTNode getOuterAST(ASTNode astNode, ASTNode innerSelectAST,
-      AliasDecider aliasDecider, StorageCandidate sc, boolean isSelectAst, 
Set<String> dimensionSet) throws LensException {
+      AliasDecider aliasDecider, StorageCandidate sc, boolean isSelectAst, 
Set<String> dimensionSet)
+      throws LensException {
     if (astNode == null) {
       return null;
     }
@@ -701,6 +703,7 @@ public class UnionQueryWriter {
     StringBuilder from = new StringBuilder();
     List<String> hqlQueries = new ArrayList<>();
     for (StorageCandidate sc : storageCandidates) {
+      removeAggreagateFromDefaultColumns(sc.getQueryAst().getSelectAST());
       Set<Dimension> queriedDims = factDimMap.get(sc);
       hqlQueries.add(sc.toHQL(queriedDims));
     }
@@ -709,4 +712,19 @@ public class UnionQueryWriter {
         .append(" ) as " + cubeql.getBaseCube()).toString();
   }
 
+  private void removeAggreagateFromDefaultColumns(ASTNode node) throws 
LensException {
+    for (int i = 0; i < node.getChildCount(); i++) {
+      ASTNode selectExpr = (ASTNode) node.getChild(i);
+      if (selectExpr.getChildCount() == 2) {
+        ASTNode column = (ASTNode) selectExpr.getChild(0);
+        if (HQLParser.isAggregateAST(column)
+            && column.getChildCount() == 2) {
+          if (HQLParser.getString((ASTNode) column.getChild(1)).equals("0.0")) 
{
+            selectExpr.getParent().setChild(i, getSelectExpr(null, (ASTNode) 
selectExpr.getChild(1), true));
+          }
+        }
+      }
+    }
+
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java 
b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
index f4049f5..caf8770 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
@@ -49,7 +49,7 @@ public final class RewriterPlan extends DriverQueryPlan {
     for (CubeQueryContext ctx : cubeQueries) {
       if (ctx.getPickedDimTables() != null && 
!ctx.getPickedDimTables().isEmpty()) {
         for (CandidateDim dim : ctx.getPickedDimTables()) {
-          addTablesQueried(dim.getStorageName());
+          addTablesQueried(dim.getStorageTable());
           if (partitions.get(dim.getName()) == null || 
partitions.get(dim.getName()).isEmpty()) {
             // puts storage table to latest part
             partitions.put(dim.getName(), dim.getParticipatingPartitions());
@@ -59,10 +59,10 @@ public final class RewriterPlan extends DriverQueryPlan {
       if (ctx.getPickedCandidate() != null) {
         for (StorageCandidate sc : 
CandidateUtil.getStorageCandidates(ctx.getPickedCandidate())) {
           addTablesQueried(sc.getAliasForTable(""));
-          Set<FactPartition> factParts = (Set<FactPartition>) 
partitions.get(sc.getName());
+          Set<FactPartition> factParts = (Set<FactPartition>) 
partitions.get(sc.getStorageTable());
           if (factParts == null) {
             factParts = new HashSet<FactPartition>();
-            partitions.put(sc.getName(), factParts);
+            partitions.put(sc.getStorageTable(), factParts);
           }
           factParts.addAll((Set<FactPartition>) 
sc.getParticipatingPartitions());
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 62d7386..033264c 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -141,7 +141,7 @@ public class CubeTestSetup {
         innerWhere, innerPostWhere, null, 
provider.providePartitionsForStorage(storage)));
       sep = " UNION ALL ";
     }
-    return sb.append(") ").append(cubeName).append(" ").append(outerWhere == 
null ? "" : outerWhere)
+    return sb.append(") ").append(" as ").append(cubeName).append(" 
").append(outerWhere == null ? "" : outerWhere)
       .append(" ").append(outerPostWhere == null ? "" : 
outerPostWhere).toString();
   }
   public static String getExpectedUnionQuery(String cubeName, List<String> 
storages, StoragePartitionProvider provider,
@@ -333,9 +333,9 @@ public class CubeTestSetup {
     return updatePeriodToWhereMap;
   }
 
-  // storageTables[0] is hourly
-  // storageTables[1] is daily
-  // storageTables[2] is monthly
+  // storageName[0] is hourly
+  // storageName[1] is daily
+  // storageName[2] is monthly
   public static Map<String, String> 
getWhereForMonthlyDailyAndHourly2months(String... storageTables) {
     Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, 
String>();
     List<String> hourlyparts = new ArrayList<String>();
@@ -778,28 +778,33 @@ public class CubeTestSetup {
 
   private void dump(CubeMetastoreClient client) throws LensException, 
IOException {
 //    for (CubeInterface cubeInterface : client.getAllCubes()) {
-//      String path = getClass().getResource("/schema/cubes/" + 
((cubeInterface instanceof Cube) ? "base" : "derived")).getPath() + "/" + 
cubeInterface.getName() + ".xml";
+//      String path = getClass().getResource("/schema/cubes/" + 
((cubeInterface instanceof Cube) ? "base"
+// : "derived")).getPath() + "/" + cubeInterface.getName() + ".xml";
 //      try(BufferedWriter bw = new BufferedWriter(new FileWriter(path))) {
 //        
bw.write(ToXMLString.toString(JAXBUtils.xCubeFromHiveCube(cubeInterface)));
 //      }
 //    }
     for (CubeFactTable cubeFactTable : client.getAllFacts()) {
-      try(BufferedWriter bw = new BufferedWriter(new 
FileWriter(getClass().getResource("/schema/facts").getPath()+"/"+cubeFactTable.getName()+".xml")))
 {
+      try(BufferedWriter bw = new BufferedWriter(new FileWriter(getClass()
+          
.getResource("/schema/facts").getPath()+"/"+cubeFactTable.getName()+".xml"))) {
         bw.write(ToXMLString.toString(client.getXFactTable(cubeFactTable)));
       }
     }
 //    for (Dimension dim : client.getAllDimensions()) {
-//      try(BufferedWriter bw = new BufferedWriter(new 
FileWriter(getClass().getResource("/schema/dimensions").getPath()+"/"+dim.getName()+".xml")))
 {
+//      try(BufferedWriter bw = new BufferedWriter(new FileWriter(getClass()
+// .getResource("/schema/dimensions").getPath()+"/"+dim.getName()+".xml"))) {
 //        
bw.write(ToXMLString.toString(JAXBUtils.xdimensionFromDimension(dim)));
 //      }
 //    }
     for (CubeDimensionTable dim : client.getAllDimensionTables()) {
-      try(BufferedWriter bw = new BufferedWriter(new 
FileWriter(getClass().getResource("/schema/dimtables").getPath()+"/"+dim.getName()+".xml")))
 {
+      try(BufferedWriter bw = new BufferedWriter(new FileWriter(getClass()
+          
.getResource("/schema/dimtables").getPath()+"/"+dim.getName()+".xml"))) {
         bw.write(ToXMLString.toString(client.getXDimensionTable(dim)));
       }
     }
 //    for (Storage storage : client.getAllStorages()) {
-//      try(BufferedWriter bw = new BufferedWriter(new 
FileWriter(getClass().getResource("/schema/storages").getPath()+"/"+storage.getName()+".xml")))
 {
+//      try(BufferedWriter bw = new BufferedWriter(new FileWriter(getClass()
+// .getResource("/schema/storages").getPath()+"/"+storage.getName()+".xml"))) {
 //        
bw.write(ToXMLString.toString(JAXBUtils.xstorageFromStorage(storage)));
 //      }
 //    }
@@ -961,4 +966,4 @@ public class CubeTestSetup {
     System.out.println("--query- " + query);
     HQLParser.printAST(HQLParser.parseHQL(query, new HiveConf()));
   }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/cb5e2a77/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 1e5d05f..3e08740 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -223,7 +223,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     Candidate candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(),
+        ((StorageCandidate) candidate).getStorageTable().toLowerCase());
     String expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
testCube.msr2 as `msr2` from ", null, null,
         getWhereForHourly2days("c1_testfact2_raw"));
@@ -286,7 +287,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     Candidate candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     String expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
avg(testCube.msr2) as `avg(testCube.msr2)` "
           + "from ", null, "group by testcube.cityid", 
getWhereForHourly2days("c1_testfact2_raw"));
@@ -298,7 +300,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr2) as `sum(testCube.msr2)` "
@@ -310,7 +313,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
testCube.msr2 as `msr2` from ",
@@ -322,7 +326,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr2)  as `sum(testCube.msr2)` "
@@ -334,7 +339,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr2) as `sum(testCube.msr2)` "
@@ -346,7 +352,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr2)  as `sum(testCube.msr2)` "
@@ -359,7 +366,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr2) as `sum(testCube.msr2)` "
@@ -372,7 +380,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT distinct testcube.cityid as `cityid`, 
round(testCube.msr2)  "
@@ -384,7 +393,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
count(distinct testCube.msr2) "
@@ -398,7 +408,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
round(testCube.msr1) as `round(testCube.msr1)` "
@@ -410,7 +421,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT distinct testcube.cityid as `cityid`, 
round(testCube.msr1) "
@@ -422,7 +434,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
count(distinct testCube.msr1) "
@@ -435,7 +448,8 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
     Assert.assertEquals(1, cubeql.getCandidates().size());
     candidate = cubeql.getCandidates().iterator().next();
     Assert.assertTrue(candidate instanceof StorageCandidate);
-    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate).getName().toLowerCase());
+    Assert.assertEquals("c1_testFact2_raw".toLowerCase(), ((StorageCandidate) 
candidate)
+        .getStorageTable().toLowerCase());
     hQL = cubeql.toHQL();
     expectedQL =
       getExpectedQuery(cubeName, "SELECT testcube.cityid as `cityid`, 
sum(testCube.msr1) as `sum(testCube.msr1)` "

Reply via email to