LENS-198 : Allow timed dimensions of cube to be non-partitioned columns as well 
(Rajat Khandelwal via amareshwari)


Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/dc1fafa9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/dc1fafa9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/dc1fafa9

Branch: refs/heads/master
Commit: dc1fafa91c8407db8e0f1bfdde24df8f5fec7bce
Parents: 6f8ad21
Author: Rajat Khandelwal <[email protected]>
Authored: Thu May 14 10:09:46 2015 +0530
Committer: Amareshwari Sriramadasu <[email protected]>
Committed: Thu May 14 10:09:46 2015 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |  39 ++-
 .../lens/cube/metadata/CubeMetastoreClient.java |   6 +
 .../lens/cube/metadata/FactPartition.java       |   6 +
 .../lens/cube/metadata/MetastoreConstants.java  |   1 +
 .../lens/cube/metadata/TimePartition.java       |   8 +
 .../lens/cube/metadata/TimePartitionRange.java  |   4 +
 .../cube/metadata/TimePartitionRangeList.java   |  38 +++
 .../metadata/timeline/PartitionTimeline.java    |  16 ++
 .../timeline/RangesPartitionTimeline.java       |  43 ++-
 .../apache/lens/cube/parse/AliasReplacer.java   |   4 +-
 .../apache/lens/cube/parse/CandidateFact.java   |  16 +-
 .../cube/parse/CandidateTablePruneCause.java    |   2 +
 .../lens/cube/parse/CubeQueryContext.java       |  39 ++-
 .../org/apache/lens/cube/parse/DateUtil.java    | 157 ++++++-----
 .../cube/parse/DenormalizationResolver.java     |   6 +-
 .../apache/lens/cube/parse/DimHQLContext.java   |  14 +-
 .../lens/cube/parse/DimOnlyHQLContext.java      |   7 +-
 .../lens/cube/parse/ExpressionResolver.java     |   2 +-
 .../apache/lens/cube/parse/FactHQLContext.java  |   8 -
 .../apache/lens/cube/parse/FieldValidator.java  |   2 +-
 .../apache/lens/cube/parse/JoinResolver.java    |   4 +-
 .../cube/parse/MaxCoveringFactResolver.java     |  70 +++--
 .../PartitionRangesForPartitionColumns.java     |  55 ++++
 .../lens/cube/parse/SingleFactHQLContext.java   |  10 +-
 .../lens/cube/parse/StorageTableResolver.java   | 128 ++++++---
 .../org/apache/lens/cube/parse/TimeRange.java   |  16 ++
 .../lens/cube/parse/TimerangeResolver.java      |   1 -
 .../apache/lens/cube/parse/CubeTestSetup.java   |  62 ++---
 .../lens/cube/parse/TestAggregateResolver.java  |  15 +-
 .../lens/cube/parse/TestBaseCubeQueries.java    |  99 ++++++-
 .../lens/cube/parse/TestCubeRewriter.java       | 264 +++++++++----------
 .../apache/lens/cube/parse/TestDateUtil.java    |  38 +++
 .../cube/parse/TestDenormalizationResolver.java |   8 +-
 .../lens/cube/parse/TestQueryRewrite.java       |  10 +
 .../lens/cube/parse/TestRewriterPlan.java       |  23 +-
 .../lens/cube/parse/TestTimeRangeExtractor.java |  14 +-
 .../lens/examples/PopulateSampleMetastore.java  |   1 -
 .../src/main/resources/cube-queries.sql         |   2 +
 lens-examples/src/main/resources/customer.xml   |   2 +-
 .../resources/sales-aggr-fact2-local-parts.xml  |   6 +
 .../src/main/resources/sales-aggr-fact2.xml     |   1 +
 lens-examples/src/main/resources/sales-cube.xml |  10 +-
 42 files changed, 856 insertions(+), 401 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd 
b/lens-api/src/main/resources/cube-0.1.xsd
index 10b1dfa..06f85ee 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -61,6 +61,39 @@
             1. cube.allfields.queriable - (defaults to true) - If set to true, 
all the fields (measures, attributes) are
             queryable together. If set to false, there should be derived cubes 
defined that should group the queriable
             fields together.
+
+            Additional properties:
+
+            1. cube.timedim.relation.{time_dim1}: 
time_dim2+[timediff1,timediff2]. It's assumed that
+              timediff1 is smaller than timediff2. Means that time_dim1 can be 
expected to be between
+              [time_dim2+timediff1 and time_dim2+timediff2]. One use case 
would be the following:
+
+              1.1. if a query is on range of time_dim1 and
+                time_dim1 has no partitioning column in some fact, then 
partitions for time_dim2 can be looked at.
+                Let's say time_dim2's part col is part_col2, then for a query 
asking for time_dim1 between [a,b) can
+                be answered by looking at partitions of part_col2 in range 
[a-timediff2, b-timediff1).
+
+              This property is first looked into fact properties, then cube 
properties and if that's a derived cube,
+              then next in base cube properties. Wherever found first, that 
will be considered as the final relation
+              between time dimensions.
+
+              Time dimension relations are transitive, but not reversible. i.e.
+              cube.timedim.relation.time_dim1 = time_dim2 + [a, b]
+              cube.timedim.relation.time_dim2 = time_dim3 + [c, d]
+
+              implies:
+
+              cube.timedim.relation.time_dim1 = time_dim3 + [a+c, b+d]
+
+              but not:
+
+              cube.timedim.relation.time_dim2 = time_dim1 + [-b, -a]
+
+              Reverse relations have to be defined explicitly.
+
+              Timediff syntax is sign, quantity and unit. Spaces in between 
can be present. e.g. -4 days, +4days, +4 day
+              etc all are valid.
+
           </xs:documentation>
         </xs:annotation>
       </xs:element>
@@ -752,7 +785,7 @@
           </xs:documentation>
         </xs:annotation>
       </xs:element>
-      <xs:element name="skewed_info" type="x_skewed_info">
+      <xs:element name="skewed_info" type="x_skewed_info" 
maxOccurs="unbounded" minOccurs="0">
         <xs:annotation>
           <xs:documentation>
             Skewed info, if the table is skewed. Expressed as
@@ -1090,8 +1123,8 @@
 
             1. cube.fact.{fact-name}.valid.columns : comma separated column 
names which are valid in the fact.
             2. cube.fact.is.aggregated : Defaults to true. If the fact is a 
raw fact, this should be set to false,
-             otherwise true.
-
+              otherwise true.
+            3. cube.timedim.relation.{time_dim1}: See the same property in 
cube. Fact tables can override the property.
           </xs:documentation>
         </xs:annotation>
       </xs:element>

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 01d76c0..08a63b8 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -230,6 +230,12 @@ public class CubeMetastoreClient {
       for (Partition partition : getPartitionsByFilter(storageTableName, 
null)) {
         UpdatePeriod period = deduceUpdatePeriod(partition);
         List<String> values = partition.getValues();
+        if (values.contains(StorageConstants.LATEST_PARTITION_VALUE)) {
+          log.info("dropping latest partition from fact storage table: " + 
storageTableName
+            + ". Spec: " + partition.getSpec());
+          getClient().dropPartition(storageTableName, values, false);
+          continue;
+        }
         for (int i = 0; i < partCols.size(); i++) {
           if (timeParts.contains(partCols.get(i).getName())) {
             addForBatchAddition(storageTableName, period, 
partCols.get(i).getName(), values.get(i));

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
index 8a6dcba..3465513 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/FactPartition.java
@@ -24,6 +24,8 @@ import java.util.Date;
 import java.util.LinkedHashSet;
 import java.util.Set;
 
+import org.apache.lens.server.api.error.LensException;
+
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
 import lombok.Setter;
@@ -160,4 +162,8 @@ public class FactPartition implements 
Comparable<FactPartition> {
     }
     return colComp;
   }
+
+  public TimePartition getTimePartition() throws LensException {
+    return TimePartition.of(getPeriod(), getPartSpec());
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
index 74a26fe..1fe55d9 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
@@ -108,4 +108,5 @@ public final class MetastoreConstants {
   public static final String PARTITION_UPDATE_PERIOD_SFX = 
".partition.update.period";
   public static final String PARTITION_UPDATE_PERIOD = 
"cube.storagetable.partition.update.period";
   public static final String TIMEDIM_TO_PART_MAPPING_PFX = 
"cube.timedim.partition.";
+  public static final String TIMEDIM_RELATION = "cube.timedim.relation.";
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
index f644c16..7c966d8 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartition.java
@@ -67,6 +67,10 @@ public class TimePartition implements 
Comparable<TimePartition>, Named {
     }
   }
 
+  public TimePartition withUpdatePeriod(UpdatePeriod period) throws 
LensException {
+    return TimePartition.of(period, getDate());
+  }
+
   public String toString() {
     return dateString;
   }
@@ -141,4 +145,8 @@ public class TimePartition implements 
Comparable<TimePartition>, Named {
   public String getName() {
     return getDateString();
   }
+
+  public TimePartitionRange emptyRange() throws LensException {
+    return this.rangeUpto(this);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
index 1837ae9..e16294e 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRange.java
@@ -127,4 +127,8 @@ public class TimePartitionRange implements 
Iterable<TimePartition>, Named {
   public long size() {
     return DateUtil.getTimeDiff(begin.getDate(), end.getDate(), 
begin.getUpdatePeriod());
   }
+
+  public boolean isValidAndNonEmpty() {
+    return begin.before(end);
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRangeList.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRangeList.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRangeList.java
new file mode 100644
index 0000000..9c6759e
--- /dev/null
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/TimePartitionRangeList.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.metadata;
+
+import java.util.ArrayList;
+
+public class TimePartitionRangeList extends ArrayList<TimePartitionRange> {
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    String sep = "";
+    for (TimePartitionRange range : this) {
+      sb.append(sep).append(range);
+      sep = "U";
+    }
+    return sb.toString();
+  }
+
+  public TimePartitionRange last() {
+    return get(size() - 1);
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
index 8a533fc..d27e43e 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
@@ -23,6 +23,7 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.MetastoreUtil;
 import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.TimePartitionRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.server.api.error.LensException;
 
@@ -146,6 +147,21 @@ public abstract class PartitionTimeline implements 
Iterable<TimePartition> {
     // Can also return the failed to add items.
     return result;
   }
+  /**
+   * Add partition range to the timeline. Default implementation is to iterate 
over the range and add
+   * each time partition belonging to the given range. Implementing classes 
can override.
+   *
+   * @param partitionRange
+   * @return whether add was successful
+   * @throws LensException
+   */
+  boolean add(TimePartitionRange partitionRange) throws LensException {
+    boolean ret = true;
+    for (TimePartition part : partitionRange) {
+      ret &= add(part);
+    }
+    return ret;
+  }
 
   /**
    * drop partition.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
index 6646718..1b9a44a 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/RangesPartitionTimeline.java
@@ -21,17 +21,12 @@ package org.apache.lens.cube.metadata.timeline;
 
 import java.util.HashMap;
 import java.util.Iterator;
-import java.util.List;
 import java.util.Map;
 
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.cube.metadata.TimePartition;
-import org.apache.lens.cube.metadata.TimePartitionRange;
-import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.cube.metadata.*;
 import org.apache.lens.server.api.error.LensException;
 
 import com.google.common.base.Strings;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import lombok.Data;
 import lombok.EqualsAndHashCode;
@@ -46,7 +41,7 @@ import lombok.ToString;
 @EqualsAndHashCode(callSuper = true)
 @ToString(callSuper = true)
 public class RangesPartitionTimeline extends PartitionTimeline {
-  private List<TimePartitionRange> ranges = Lists.newArrayList();
+  private TimePartitionRangeList ranges = new TimePartitionRangeList();
 
   public RangesPartitionTimeline(String storageTableName, UpdatePeriod 
updatePeriod,
     String partCol) {
@@ -89,6 +84,37 @@ public class RangesPartitionTimeline extends 
PartitionTimeline {
     return true;
   }
 
+  @Override
+  public boolean add(TimePartitionRange partitionRange) throws LensException {
+    // Adding partition range to the timeline. Will have to find if any of the 
sub ranges
+    // intersects. If yes, add only remaining sub ranges, else add the given 
range as a new sub range.
+    int strictlyAfterIndex = getStrictlyAfterIndex(partitionRange.getBegin());
+    while (strictlyAfterIndex < ranges.size() && 
partitionRange.isValidAndNonEmpty()) {
+      if 
(partitionRange.getEnd().before(ranges.get(strictlyAfterIndex).getBegin())) {
+        // partition begin and end both are strictly before 
ranges[strictlyAfterIndex]. Add as new sub range.
+        ranges.add(strictlyAfterIndex, partitionRange);
+        partitionRange = partitionRange.getEnd().emptyRange();
+        break;
+      } else {
+        // begin is before ranges[strictlyAfterIndex], end is not.
+        // extend ranges[strictlyAfterIndex] and add remaining range, if any.
+        ranges.get(strictlyAfterIndex).setBegin(partitionRange.getBegin());
+        if 
(ranges.get(strictlyAfterIndex).getEnd().before(partitionRange.getEnd())) {
+          partitionRange = 
ranges.get(strictlyAfterIndex).getEnd().rangeUpto(partitionRange.getEnd());
+        } else {
+          // No remaining range, end was before ranges[strictlyAfterIndex].end
+          partitionRange = 
ranges.get(strictlyAfterIndex).getEnd().emptyRange();
+        }
+        strictlyAfterIndex++;
+      }
+    }
+    if (strictlyAfterIndex == ranges.size() && 
partitionRange.isValidAndNonEmpty()) {
+      ranges.add(partitionRange);
+    }
+    mergeRanges();
+    return true;
+  }
+
   private int getStrictlyAfterIndex(TimePartition part) {
     int start = 0;
     int end = getRanges().size();
@@ -110,6 +136,9 @@ public class RangesPartitionTimeline extends 
PartitionTimeline {
         TimePartitionRange removed = ranges.remove(i + 1);
         ranges.get(i).setEnd(removed.getEnd());
         i--; // check again at same index
+      } else if (ranges.get(i).isEmpty()) {
+        ranges.remove(i);
+        i--;
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
index a65bc96..9d367c3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/AliasReplacer.java
@@ -136,7 +136,7 @@ class AliasReplacer implements ContextRewriter {
       if (cubeql.getCube() != null) {
         Set<String> cols = cubeql.getCube().getAllFieldNames();
         if (cols.contains(col.toLowerCase())) {
-          colToTableAlias.put(col.toLowerCase(), 
cubeql.getAliasForTabName(cubeql.getCube().getName()));
+          colToTableAlias.put(col.toLowerCase(), 
cubeql.getAliasForTableName(cubeql.getCube().getName()));
           cubeql.addColumnsQueried((AbstractCubeTable) cubeql.getCube(), 
col.toLowerCase());
           inCube = true;
         }
@@ -148,7 +148,7 @@ class AliasReplacer implements ContextRewriter {
             if (prevDim != null && !prevDim.equals(dim.getName())) {
               throw new SemanticException(ErrorMsg.AMBIGOUS_DIM_COLUMN, col, 
prevDim, dim.getName());
             }
-            colToTableAlias.put(col.toLowerCase(), 
cubeql.getAliasForTabName(dim.getName()));
+            colToTableAlias.put(col.toLowerCase(), 
cubeql.getAliasForTableName(dim.getName()));
             cubeql.addColumnsQueried(dim, col.toLowerCase());
           } else {
             // throw error because column is in both cube and dimension table

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
index 47b9fc1..31eb113 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateFact.java
@@ -42,6 +42,9 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.antlr.runtime.CommonToken;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
 import lombok.Getter;
 import lombok.Setter;
 
@@ -57,22 +60,23 @@ public class CandidateFact implements CandidateTable {
   // flag to know if querying multiple storage tables is enabled for this fact
   @Getter
   @Setter
-  private  boolean enabledMultiTableSelect;
+  private boolean enabledMultiTableSelect;
   @Getter
   private int numQueriedParts = 0;
   @Getter
-  private final Set<FactPartition> partsQueried = new HashSet<FactPartition>();
+  private final Set<FactPartition> partsQueried = Sets.newHashSet();
   @Getter
-  private final Map<TimeRange, String> rangeToWhereClause = new 
HashMap<TimeRange, String>();
+  private final Map<TimeRange, String> rangeToWhereClause = Maps.newHashMap();
+
   private boolean dbResolved = false;
   private CubeInterface baseTable;
   private ASTNode selectAST;
   private ASTNode whereAST;
   private ASTNode groupbyAST;
   private ASTNode havingAST;
-  private List<TimeRangeNode> timenodes = new ArrayList<TimeRangeNode>();
-  private final List<Integer> selectIndices = new ArrayList<Integer>();
-  private final List<Integer> dimFieldIndices = new ArrayList<Integer>();
+  private List<TimeRangeNode> timenodes = Lists.newArrayList();
+  private final List<Integer> selectIndices = Lists.newArrayList();
+  private final List<Integer> dimFieldIndices = Lists.newArrayList();
   private Collection<String> columns;
 
   CandidateFact(CubeFactTable fact, CubeInterface cube) {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
index 165525d..a1fea16 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTablePruneCause.java
@@ -36,6 +36,8 @@ import lombok.NoArgsConstructor;
 
 public class CandidateTablePruneCause {
 
+
+
   public enum CandidateTablePruneCode {
     MORE_WEIGHT("Picked table had more weight than minimum."),
     // partial data is enabled, another fact has more data.

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index b7c3ee9..230a2ee 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hive.ql.parse.*;
 
 import org.codehaus.jackson.map.ObjectMapper;
 
+import com.google.common.collect.Sets;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.ToString;
@@ -310,6 +311,21 @@ public class CubeQueryContext {
     return autoJoinCtx != null && autoJoinCtx.isJoinsResolved();
   }
 
+  public Cube getBaseCube() {
+    if (cube instanceof Cube) {
+      return (Cube) cube;
+    }
+    return ((DerivedCube) cube).getParent();
+  }
+
+  public Set<String> getPartitionColumnsQueried() {
+    Set<String> partsQueried = Sets.newHashSet();
+    for (TimeRange range : getTimeRanges()) {
+      partsQueried.add(range.getPartitionColumn());
+    }
+    return partsQueried;
+  }
+
   // Holds the context of optional dimension
   // A dimension is optional if it is not queried directly by the user, but is
   // required by a candidate table to get a denormalized field from reference
@@ -384,20 +400,24 @@ public class CubeQueryContext {
     dimMsgs.addPruningMsg(dimtable, msg);
   }
 
-  public String getAliasForTabName(String tabName) {
+  public String getAliasForTableName(Named named) {
+    return getAliasForTableName(named.getName());
+  }
+
+  public String getAliasForTableName(String tableName) {
     for (String alias : qb.getTabAliases()) {
       String table = qb.getTabNameForAlias(alias);
-      if (table != null && table.equalsIgnoreCase(tabName)) {
+      if (table != null && table.equalsIgnoreCase(tableName)) {
         return alias;
       }
     }
     // get alias from cubeTbls
     for (Map.Entry<String, AbstractCubeTable> cubeTblEntry : 
cubeTbls.entrySet()) {
-      if (cubeTblEntry.getValue().getName().equalsIgnoreCase(tabName)) {
+      if (cubeTblEntry.getValue().getName().equalsIgnoreCase(tableName)) {
         return cubeTblEntry.getKey();
       }
     }
-    return tabName;
+    return tableName;
   }
 
   public void print() {
@@ -581,13 +601,13 @@ public class CubeQueryContext {
     String fromString = null;
     if (getJoinTree() == null) {
       if (cube != null) {
-        fromString = fact.getStorageString(getAliasForTabName(cube.getName()));
+        fromString = 
fact.getStorageString(getAliasForTableName(cube.getName()));
       } else {
         if (dimensions.size() != 1) {
           throw new SemanticException(ErrorMsg.NO_JOIN_CONDITION_AVAIABLE);
         }
         Dimension dim = dimensions.iterator().next();
-        fromString = 
dimsToQuery.get(dim).getStorageString(getAliasForTabName(dim.getName()));
+        fromString = 
dimsToQuery.get(dim).getStorageString(getAliasForTableName(dim.getName()));
       }
     } else {
       StringBuilder builder = new StringBuilder();
@@ -843,11 +863,11 @@ public class CubeQueryContext {
   }
 
   public Set<String> getColumnsQueried(String tblName) {
-    return tblAliasToColumns.get(getAliasForTabName(tblName));
+    return tblAliasToColumns.get(getAliasForTableName(tblName));
   }
 
   public void addColumnsQueried(AbstractCubeTable table, String column) {
-    addColumnsQueried(getAliasForTabName(table.getName()), column);
+    addColumnsQueried(getAliasForTableName(table.getName()), column);
   }
 
   public void addColumnsQueried(String alias, String column) {
@@ -876,7 +896,8 @@ public class CubeQueryContext {
     } else {
       String cubeName = split[0].trim();
       String colName = split[1].trim();
-      if (cubeName.equalsIgnoreCase(cube.getName()) || 
cubeName.equalsIgnoreCase(getAliasForTabName(cube.getName()))) {
+      if (cubeName.equalsIgnoreCase(cube.getName())
+        || cubeName.equalsIgnoreCase(getAliasForTableName(cube.getName()))) {
         return cube.getMeasureNames().contains(colName.toLowerCase());
       } else {
         return false;

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
index 82c43a7..b11de10 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DateUtil.java
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
+import static java.util.Calendar.*;
+
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
@@ -36,6 +38,7 @@ import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.log4j.Logger;
 
 import lombok.Data;
+import lombok.EqualsAndHashCode;
 
 public final class DateUtil {
   private DateUtil() {
@@ -76,6 +79,7 @@ public final class DateUtil {
   public static final String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
   public static final String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
   public static final String ABSDATE_FMT = "yyyy-MM-dd-HH:mm:ss,SSS";
+  public static final String HIVE_QUERY_DATE_FMT = "yyyy-MM-dd HH:mm:ss";
 
   public static final ThreadLocal<DateFormat> ABSDATE_PARSER =
     new ThreadLocal<DateFormat>() {
@@ -84,6 +88,13 @@ public final class DateUtil {
         return new SimpleDateFormat(ABSDATE_FMT);
       }
     };
+  public static final ThreadLocal<DateFormat> HIVE_QUERY_DATE_PARSER =
+    new ThreadLocal<DateFormat>() {
+      @Override
+      protected SimpleDateFormat initialValue() {
+        return new SimpleDateFormat(HIVE_QUERY_DATE_FMT);
+      }
+    };
 
   public static String formatDate(Date dt) {
     return ABSDATE_PARSER.get().format(dt);
@@ -141,14 +152,14 @@ public final class DateUtil {
       if (granularityMatcher.find()) {
         String unit = granularityMatcher.group().toLowerCase();
         if ("year".equals(unit)) {
-          calendar = DateUtils.truncate(calendar, Calendar.YEAR);
+          calendar = DateUtils.truncate(calendar, YEAR);
         } else if ("month".equals(unit)) {
-          calendar = DateUtils.truncate(calendar, Calendar.MONTH);
+          calendar = DateUtils.truncate(calendar, MONTH);
         } else if ("week".equals(unit)) {
           calendar.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
-          calendar = DateUtils.truncate(calendar, Calendar.DAY_OF_MONTH);
+          calendar = DateUtils.truncate(calendar, DAY_OF_MONTH);
         } else if ("day".equals(unit)) {
-          calendar = DateUtils.truncate(calendar, Calendar.DAY_OF_MONTH);
+          calendar = DateUtils.truncate(calendar, DAY_OF_MONTH);
         } else if ("hour".equals(unit)) {
           calendar = DateUtils.truncate(calendar, Calendar.HOUR_OF_DAY);
         } else if ("minute".equals(unit)) {
@@ -162,50 +173,9 @@ public final class DateUtil {
     }
 
     // Get rid of 'now' part and whitespace
-    String raw = str.replaceAll(GRANULARITY, "").replace(WSPACE, "");
-
-    if (raw.isEmpty()) { // String is just "now" with granularity
-      return calendar.getTime();
-    }
-
-    // Get the relative diff part to get eventual date based on now.
-    Matcher qtyMatcher = P_QUANTITY.matcher(raw);
-    int qty = 1;
-    if (qtyMatcher.find()) {
-      qty = Integer.parseInt(qtyMatcher.group());
-    }
-
-    Matcher signageMatcher = P_SIGNAGE.matcher(raw);
-    if (signageMatcher.find()) {
-      String sign = signageMatcher.group();
-      if ("-".equals(sign)) {
-        qty = -qty;
-      }
-    }
-
-    Matcher unitMatcher = P_UNIT.matcher(raw);
-    if (unitMatcher.find()) {
-      String unit = unitMatcher.group().toLowerCase();
-      if ("year".equals(unit)) {
-        calendar.add(Calendar.YEAR, qty);
-      } else if ("month".equals(unit)) {
-        calendar.add(Calendar.MONTH, qty);
-      } else if ("week".equals(unit)) {
-        calendar.add(Calendar.DAY_OF_MONTH, 7 * qty);
-      } else if ("day".equals(unit)) {
-        calendar.add(Calendar.DAY_OF_MONTH, qty);
-      } else if ("hour".equals(unit)) {
-        calendar.add(Calendar.HOUR_OF_DAY, qty);
-      } else if ("minute".equals(unit)) {
-        calendar.add(Calendar.MINUTE, qty);
-      } else if ("second".equals(unit)) {
-        calendar.add(Calendar.SECOND, qty);
-      } else {
-        throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
-      }
-    }
-
-    return calendar.getTime();
+    String diffStr = str.replaceAll(RELATIVE, "").replace(WSPACE, "");
+    TimeDiff diff = TimeDiff.parseFrom(diffStr);
+    return diff.offsetFrom(calendar.getTime());
   }
 
   public static Date getCeilDate(Date fromDate, UpdatePeriod interval) {
@@ -214,12 +184,12 @@ public final class DateUtil {
     boolean hasFraction = false;
     switch (interval) {
     case YEARLY:
-      if (cal.get(Calendar.MONTH) != 0) {
+      if (cal.get(MONTH) != 0) {
         hasFraction = true;
         break;
       }
     case MONTHLY:
-      if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
+      if (cal.get(DAY_OF_MONTH) != 1) {
         hasFraction = true;
         break;
       }
@@ -263,9 +233,9 @@ public final class DateUtil {
     cal.setTime(toDate);
     switch (interval) {
     case YEARLY:
-      cal.set(Calendar.MONTH, 0);
+      cal.set(MONTH, 0);
     case MONTHLY:
-      cal.set(Calendar.DAY_OF_MONTH, 1);
+      cal.set(DAY_OF_MONTH, 1);
     case DAILY:
       cal.set(Calendar.HOUR_OF_DAY, 0);
     case HOURLY:
@@ -289,20 +259,20 @@ public final class DateUtil {
   public static int getNumberofDaysInMonth(Date date) {
     Calendar calendar = Calendar.getInstance();
     calendar.setTime(date);
-    return calendar.getActualMaximum(Calendar.DAY_OF_MONTH);
+    return calendar.getActualMaximum(DAY_OF_MONTH);
   }
 
   public static CoveringInfo getMonthlyCoveringInfo(Date from, Date to) {
     // Move 'from' to end of month, unless its the first day of month
     boolean coverable = true;
-    if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
       coverable = false;
     }
 
     // Move 'to' to beginning of next month, unless its the first day of the 
month
-    if (!to.equals(DateUtils.truncate(to, Calendar.MONTH))) {
-      to = DateUtils.truncate(to, Calendar.MONTH);
+    if (!to.equals(DateUtils.truncate(to, MONTH))) {
+      to = DateUtils.truncate(to, MONTH);
       coverable = false;
     }
 
@@ -320,13 +290,13 @@ public final class DateUtil {
       return new CoveringInfo(0, false);
     }
     boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
       coverable = false;
     }
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
-    int fromMonth = cal.get(Calendar.MONTH);
+    int fromMonth = cal.get(MONTH);
 
     // Get the start date of the quarter
     int beginOffset = (3 - fromMonth % 3) % 3;
@@ -344,13 +314,13 @@ public final class DateUtil {
       return new CoveringInfo(0, false);
     }
     boolean coverable = monthlyCoveringInfo.isCoverable();
-    if (!from.equals(DateUtils.truncate(from, Calendar.MONTH))) {
-      from = DateUtils.addMonths(DateUtils.truncate(from, Calendar.MONTH), 1);
+    if (!from.equals(DateUtils.truncate(from, MONTH))) {
+      from = DateUtils.addMonths(DateUtils.truncate(from, MONTH), 1);
       coverable = false;
     }
     Calendar cal = Calendar.getInstance();
     cal.setTime(from);
-    int fromMonth = cal.get(Calendar.MONTH);
+    int fromMonth = cal.get(MONTH);
     int beginOffset = (12 - fromMonth % 12) % 12;
     int endOffset = (monthlyCoveringInfo.getCountBetween() - beginOffset) % 12;
     if (beginOffset > 0 || endOffset > 0) {
@@ -375,10 +345,10 @@ public final class DateUtil {
     cal.setTime(from);
     int fromWeek = cal.get(Calendar.WEEK_OF_YEAR);
     int fromDay = cal.get(Calendar.DAY_OF_WEEK);
-    int fromYear = cal.get(Calendar.YEAR);
+    int fromYear = cal.get(YEAR);
 
     cal.clear();
-    cal.set(Calendar.YEAR, fromYear);
+    cal.set(YEAR, fromYear);
     cal.set(Calendar.WEEK_OF_YEAR, fromWeek);
     cal.set(Calendar.DAY_OF_WEEK, Calendar.SUNDAY);
     int maxDayInWeek = cal.getActualMaximum(Calendar.DAY_OF_WEEK);
@@ -444,4 +414,61 @@ public final class DateUtil {
       this.coverable = coverable;
     }
   }
+  @EqualsAndHashCode
+  static class TimeDiff{
+    int quantity;
+    int calendarField;
+
+    public TimeDiff(int quantity, int calendarField) {
+      this.quantity = quantity;
+      this.calendarField = calendarField;
+    }
+
+    static TimeDiff parseFrom(String diffStr) throws SemanticException {
+      // Get the relative diff part to get eventual date based on now.
+      Matcher qtyMatcher = P_QUANTITY.matcher(diffStr);
+      int qty = 1;
+      if (qtyMatcher.find()) {
+        qty = Integer.parseInt(qtyMatcher.group());
+      }
+
+      Matcher signageMatcher = P_SIGNAGE.matcher(diffStr);
+      if (signageMatcher.find()) {
+        String sign = signageMatcher.group();
+        if ("-".equals(sign)) {
+          qty = -qty;
+        }
+      }
+
+      Matcher unitMatcher = P_UNIT.matcher(diffStr);
+      if (unitMatcher.find()) {
+        String unit = unitMatcher.group().toLowerCase();
+        if ("year".equals(unit)) {
+          return new TimeDiff(qty, YEAR);
+        } else if ("month".equals(unit)) {
+          return new TimeDiff(qty, MONTH);
+        } else if ("week".equals(unit)) {
+          return new TimeDiff(7 * qty, DAY_OF_MONTH);
+        } else if ("day".equals(unit)) {
+          return new TimeDiff(qty, DAY_OF_MONTH);
+        } else if ("hour".equals(unit)) {
+          return new TimeDiff(qty, HOUR_OF_DAY);
+        } else if ("minute".equals(unit)) {
+          return new TimeDiff(qty, MINUTE);
+        } else if ("second".equals(unit)) {
+          return new TimeDiff(qty, SECOND);
+        } else {
+          throw new SemanticException(ErrorMsg.INVALID_TIME_UNIT, unit);
+        }
+      }
+      return new TimeDiff(0, SECOND);
+    }
+
+    public Date offsetFrom(Date time) {
+      return DateUtils.add(time, calendarField, quantity);
+    }
+    public Date negativeOffsetFrom(Date time) {
+      return DateUtils.add(time, calendarField, -quantity);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index f5d2115..7857868 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -249,12 +249,12 @@ public class DenormalizationResolver implements 
ContextRewriter {
               throw new SemanticException("No reference column available for " 
+ refered);
             }
             PickedReference picked = new 
PickedReference(refered.references.iterator().next(),
-              cubeql.getAliasForTabName(refered.srcTable.getName()), tbl);
+              cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
             addPickedReference(refered.col.getName(), picked);
             pickedRefs.add(picked);
           } else {
             PickedReference picked =
-              new PickedReference(refered.col, 
cubeql.getAliasForTabName(refered.srcTable.getName()), tbl);
+              new PickedReference(refered.col, 
cubeql.getAliasForTableName(refered.srcTable.getName()), tbl);
             addPickedReference(refered.col.getName(), picked);
             pickedRefs.add(picked);
           }
@@ -300,7 +300,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
           return;
         }
         ASTNode newTableNode =
-          new ASTNode(new CommonToken(HiveParser.Identifier, 
query.getAliasForTabName(refered.getDestTable())));
+          new ASTNode(new CommonToken(HiveParser.Identifier, 
query.getAliasForTableName(refered.getDestTable())));
         tableNode.setChild(0, newTableNode);
         newTableNode.setParent(tableNode);
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index e200b12..ea461ad 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -62,13 +62,16 @@ abstract class DimHQLContext extends SimpleHQLContext {
   protected void setMissingExpressions() throws SemanticException {
     setFrom(getFromString());
     setWhere(joinWithAnd(
-      getQuery().getConf().getBoolean(
+      genWhereClauseWithDimPartitions(where), getQuery().getConf().getBoolean(
         CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, 
CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
-        ? getPostSelectionWhereClause() : null,
-      genWhereClauseWithDimPartitions(where)));
+        ? getPostSelectionWhereClause() : null));
   }
 
-  protected abstract String getPostSelectionWhereClause() throws 
SemanticException;
+  protected String getPostSelectionWhereClause() throws SemanticException {
+    return null;
+  }
+
+
 
   protected String getFromString() throws SemanticException {
     String fromString = getFromTable();
@@ -103,7 +106,8 @@ abstract class DimHQLContext extends SimpleHQLContext {
       for (Dimension dim : queriedDims) {
         CandidateDim cdim = dimsToQuery.get(dim);
         if (!cdim.isWhereClauseAdded() && 
!StringUtils.isBlank(cdim.getWhereClause())) {
-          appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, 
query.getAliasForTabName(dim.getName())), added);
+          appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, 
query.getAliasForTableName(dim.getName())),
+            added);
           added = true;
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 8a23ded..785b0e4 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -50,18 +50,13 @@ class DimOnlyHQLContext extends DimHQLContext {
   protected String getFromTable() throws SemanticException {
     if (query.getAutoJoinCtx() != null && 
query.getAutoJoinCtx().isJoinsResolved()) {
       return 
getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
-        
query.getAliasForTabName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));
+        
query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));
     } else {
       return query.getQBFromString(null, getDimsToQuery());
     }
   }
 
   @Override
-  protected String getPostSelectionWhereClause() throws SemanticException {
-    return null;
-  }
-
-  @Override
   protected Set<Dimension> getQueriedDimSet() {
     return getDimsToQuery().keySet();
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index c81cf34..5355049 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -156,7 +156,7 @@ class ExpressionResolver implements ContextRewriter {
           ASTNode current = (ASTNode) node.getChild(0);
           if (current.getToken().getType() == Identifier) {
             String tableName = current.getToken().getText().toLowerCase();
-            String alias = cubeql.getAliasForTabName(tableName);
+            String alias = cubeql.getAliasForTableName(tableName);
             if (!alias.equalsIgnoreCase(tableName)) {
               node.setChild(0, new ASTNode(new 
CommonToken(HiveParser.Identifier, alias)));
             }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
index 766769f..4bd89a7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FactHQLContext.java
@@ -47,14 +47,6 @@ public class FactHQLContext extends DimHQLContext {
   }
 
   @Override
-  protected String getPostSelectionWhereClause() throws SemanticException {
-    return StorageUtil.getNotLatestClauseForDimensions(
-      query.getAliasForTabName(query.getCube().getName()),
-      fact.getTimePartCols(query),
-      query.getTimeRanges().iterator().next().getPartitionColumn());
-  }
-
-  @Override
   protected Set<Dimension> getQueriedDimSet() {
     return factDims;
   }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index ddabc25..ffe6ebd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -127,7 +127,7 @@ public class FieldValidator implements ContextRewriter {
             // this 'tabName' is a join chain, so add all source columns
             
chainSourceColumns.addAll(cubeql.getJoinchains().get(tabName).getSourceColumns());
             nonQueryableColumns.add(tabName + "." + colName);
-          } else if 
(tabName.equalsIgnoreCase(cubeql.getAliasForTabName(cube.getName()))
+          } else if 
(tabName.equalsIgnoreCase(cubeql.getAliasForTableName(cube.getName()))
             && cube.getDimAttributeNames().contains(colName)) {
             // Alternatively, check if this is a dimension attribute, if yes 
add it to the dim attribute set
             // and non queryable fields set

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index a41424a..3e3534c 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -149,7 +149,7 @@ class JoinResolver implements ContextRewriter {
         }
       }
       if (root.getSubtrees().size() > 0) {
-        root.setAlias(cubeql.getAliasForTabName(
+        root.setAlias(cubeql.getAliasForTableName(
           
root.getSubtrees().keySet().iterator().next().getFromTable().getName()));
       }
       return root;
@@ -194,7 +194,7 @@ class JoinResolver implements ContextRewriter {
         // And for destination tables, an alias will be decided from here but 
might be
         // overridden outside this function.
         AbstractCubeTable destTable = tableRelationship.getToTable();
-        current.setAlias(cubeql.getAliasForTabName(destTable.getName()));
+        current.setAlias(cubeql.getAliasForTableName(destTable.getName()));
         if (aliasUsage.get(current.getAlias()) == null) {
           aliasUsage.put(current.getAlias(), 0);
         } else {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index fcc6114..9a92bee 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -24,8 +24,6 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.metadata.TimePartition;
-import org.apache.lens.cube.metadata.TimePartitionRange;
 import org.apache.lens.cube.metadata.UpdatePeriod;
 import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
 import org.apache.lens.server.api.error.LensException;
@@ -56,26 +54,52 @@ class MaxCoveringFactResolver implements ContextRewriter {
       // redundant computation.
       return;
     }
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
-      Map<Set<CandidateFact>, Long> factPartCount = Maps.newHashMap();
-      for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-        factPartCount.put(facts, getTimeCovered(facts));
+    if (cubeql.getCube() == null || cubeql.getCandidateFactSets().size() <= 1) 
{
+      // nothing to prune.
+      return;
+    }
+    // For each part column, which candidate fact sets are covering how much 
amount.
+    // Later, we'll maximize coverage for each queried part column.
+    Map<String, Map<Set<CandidateFact>, Long>> partCountsPerPartCol = 
Maps.newHashMap();
+    for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
+      for (Map.Entry<String, Long> entry : 
getTimeCoveredForEachPartCol(facts).entrySet()) {
+        if (!partCountsPerPartCol.containsKey(entry.getKey())) {
+          partCountsPerPartCol.put(entry.getKey(), Maps.<Set<CandidateFact>, 
Long>newHashMap());
+        }
+        partCountsPerPartCol.get(entry.getKey()).put(facts, entry.getValue());
       }
-      long maxTimeCovered = Collections.max(factPartCount.values());
-      TimeCovered timeCovered = new TimeCovered(maxTimeCovered);
-      for (Iterator<Set<CandidateFact>> i = 
cubeql.getCandidateFactSets().iterator(); i.hasNext();) {
-        Set<CandidateFact> facts = i.next();
-        if (factPartCount.get(facts) < maxTimeCovered) {
-          LOG.info("Not considering facts:" + facts + " from candidate fact 
tables as it covers less time than the max."
-            + "which is: " + timeCovered);
-          i.remove();
+    }
+    // for each queried partition, prune fact sets that are covering less 
range than max
+    for (String partColQueried : cubeql.getPartitionColumnsQueried()) {
+      if (partCountsPerPartCol.get(partColQueried) != null) {
+        long maxTimeCovered = 
Collections.max(partCountsPerPartCol.get(partColQueried).values());
+        TimeCovered timeCovered = new TimeCovered(maxTimeCovered);
+        Iterator<Set<CandidateFact>> iter = 
cubeql.getCandidateFactSets().iterator();
+        while (iter.hasNext()) {
+          Set<CandidateFact> facts = iter.next();
+          Long timeCoveredLong = 
partCountsPerPartCol.get(partColQueried).get(facts);
+          if (timeCoveredLong == null) {
+            timeCoveredLong = 0L;
+          }
+          if (timeCoveredLong < maxTimeCovered) {
+            LOG.info(
+              "Not considering facts:" + facts + " from candidate fact tables 
as it covers less time than the max"
+                + " for partition column: " + partColQueried + " which is: " + 
timeCovered);
+            iter.remove();
+          }
         }
       }
-      
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(timeCovered));
     }
+    
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
   }
 
-  private long getTimeCovered(Set<CandidateFact> facts) {
+  /**
+   * Returns time covered by fact set for each part column.
+   * @param facts
+   * @return
+   */
+  private Map<String, Long> getTimeCoveredForEachPartCol(Set<CandidateFact> 
facts) {
+    Map<String, Long> ret = Maps.newHashMap();
     UpdatePeriod smallest = UpdatePeriod.values()[UpdatePeriod.values().length 
- 1];
     for (CandidateFact fact : facts) {
       for (FactPartition part : fact.getPartsQueried()) {
@@ -84,24 +108,22 @@ class MaxCoveringFactResolver implements ContextRewriter {
         }
       }
     }
-    RangesPartitionTimeline range = new RangesPartitionTimeline(null, 
smallest, null);
+    PartitionRangesForPartitionColumns partitionRangesForPartitionColumns = 
new PartitionRangesForPartitionColumns();
     for (CandidateFact fact : facts) {
       for (FactPartition part : fact.getPartsQueried()) {
         if (part.isFound()) {
           try {
-            TimePartitionRange subrange =
-              TimePartition.of(part.getPeriod(), 
part.getPartSpec()).singletonRange();
-            for (TimePartition partition : TimePartition.of(smallest, 
subrange.getBegin().getDate()).rangeUpto(
-              TimePartition.of(smallest, subrange.getEnd().getDate()))) {
-              range.add(partition);
-            }
+            partitionRangesForPartitionColumns.add(part);
           } catch (LensException e) {
             LOG.error("invalid partition: " + e);
           }
         }
       }
     }
-    return range.getTimeCovered();
+    for (Map.Entry<String, RangesPartitionTimeline> entry : 
partitionRangesForPartitionColumns.entrySet()) {
+      ret.put(entry.getKey(), entry.getValue().getTimeCovered());
+    }
+    return ret;
   }
 
   public static class TimeCovered {

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/PartitionRangesForPartitionColumns.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/PartitionRangesForPartitionColumns.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/PartitionRangesForPartitionColumns.java
new file mode 100644
index 0000000..b389a8f
--- /dev/null
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/PartitionRangesForPartitionColumns.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.TimePartition;
+import org.apache.lens.cube.metadata.UpdatePeriod;
+import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
+import org.apache.lens.server.api.error.LensException;
+
+import com.google.common.collect.Sets;
+
+
+public class PartitionRangesForPartitionColumns extends HashMap<String, 
RangesPartitionTimeline> {
+
+  public void add(String partCol, TimePartition partition) throws 
LensException {
+    if (get(partCol) == null) {
+      put(partCol, new RangesPartitionTimeline("", UpdatePeriod.values()[0], 
partCol));
+    }
+    get(partCol).add(partition.withUpdatePeriod(UpdatePeriod.values()[0])
+      .rangeUpto(partition.next().withUpdatePeriod(UpdatePeriod.values()[0])));
+  }
+
+  public Set<String> toSet() {
+    Set<String> ret = Sets.newHashSet();
+    for (Map.Entry<String, RangesPartitionTimeline> entry : entrySet()) {
+      ret.add(entry.getKey() + ":" + entry.getValue().getRanges());
+    }
+    return ret;
+  }
+
+  public void add(FactPartition part) throws LensException {
+    add(part.getPartCol(), part.getTimePartition());
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
index beaa551..e14fcfa 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactHQLContext.java
@@ -91,17 +91,9 @@ class SingleFactHQLContext extends DimOnlyHQLContext {
     return queryFormat.toString();
   }
 
-  @Override
-  protected String getPostSelectionWhereClause() throws SemanticException {
-    return StorageUtil.getNotLatestClauseForDimensions(
-      query.getAliasForTabName(query.getCube().getName()),
-      fact.getTimePartCols(query),
-      query.getTimeRanges().iterator().next().getPartitionColumn());
-  }
-
   protected String getFromTable() throws SemanticException {
     if (getQuery().getAutoJoinCtx() != null && 
getQuery().getAutoJoinCtx().isJoinsResolved()) {
-      return 
fact.getStorageString(getQuery().getAliasForTabName(getQuery().getCube().getName()));
+      return 
fact.getStorageString(getQuery().getAliasForTableName(getQuery().getCube().getName()));
     } else {
       return getQuery().getQBFromString(fact, getDimsToQuery());
     }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
index e5a6b32..bdc9855 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/StorageTableResolver.java
@@ -18,13 +18,17 @@
  */
 package org.apache.lens.cube.parse;
 
+import static org.apache.lens.cube.parse.DateUtil.WSPACE;
+import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
+
 import java.text.DateFormat;
 import java.text.ParseException;
 import java.text.SimpleDateFormat;
 import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.timeline.RangesPartitionTimeline;
 import 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCause;
 import org.apache.lens.cube.parse.CandidateTablePruneCause.SkipStorageCode;
@@ -39,8 +43,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.util.ReflectionUtils;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.Sets;
+import com.google.common.collect.Lists;
 
 /**
  * Resolve storages and partitions of all candidate tables and prunes 
candidate tables with missing storages or
@@ -309,6 +312,43 @@ class StorageTableResolver implements ContextRewriter {
     return tableName;
   }
 
+  private TimeRange getFallbackRange(TimeRange range, CandidateFact cfact, 
CubeQueryContext cubeql)
+    throws SemanticException {
+    Cube baseCube = cubeql.getBaseCube();
+    try {
+      ArrayList<String> tableNames = Lists.newArrayList(cfact.fact.getName(), 
cubeql.getCube().getName());
+      if (!cubeql.getCube().getName().equals(baseCube.getName())) {
+        tableNames.add(baseCube.getName());
+      }
+      String fallBackString = null;
+      String timedim = 
baseCube.getTimeDimOfPartitionColumn(range.getPartitionColumn());
+      for (String tableName : tableNames) {
+        fallBackString = 
cubeql.getMetastoreClient().getTable(tableName).getParameters()
+          .get(MetastoreConstants.TIMEDIM_RELATION + timedim);
+        if (StringUtils.isNotBlank(fallBackString)) {
+          break;
+        }
+      }
+      if (StringUtils.isBlank(fallBackString)) {
+        return null;
+      }
+      Matcher matcher = 
Pattern.compile("(.*?)\\+\\[(.*?),(.*?)\\]").matcher(fallBackString.replaceAll(WSPACE,
 ""));
+      if (!matcher.matches()) {
+        return null;
+      }
+      DateUtil.TimeDiff diff1 = 
DateUtil.TimeDiff.parseFrom(matcher.group(2).trim());
+      DateUtil.TimeDiff diff2 = 
DateUtil.TimeDiff.parseFrom(matcher.group(3).trim());
+      String relatedTimeDim = matcher.group(1).trim();
+      String fallbackPartCol = 
baseCube.getPartitionColumnOfTimeDim(relatedTimeDim);
+      return TimeRange.getBuilder()
+        .fromDate(diff2.negativeOffsetFrom(range.getFromDate()))
+        .toDate(diff1.negativeOffsetFrom(range.getToDate()))
+        .partitionColumn(fallbackPartCol).build();
+    } catch (HiveException e) {
+      throw new SemanticException(e);
+    }
+  }
+
   private void resolveFactStoragePartitions(CubeQueryContext cubeql) throws 
SemanticException {
     // Find candidate tables wrt supported storages
     Iterator<CandidateFact> i = cubeql.getCandidateFactTables().iterator();
@@ -316,29 +356,49 @@ class StorageTableResolver implements ContextRewriter {
       CandidateFact cfact = i.next();
       List<FactPartition> answeringParts = new ArrayList<FactPartition>();
       HashMap<String, SkipStorageCause> skipStorageCauses = new HashMap<String, SkipStorageCause>();
-      Map<UpdatePeriod, RangesPartitionTimeline> missingPartitionRanges = Maps.newHashMap();
+      PartitionRangesForPartitionColumns missingParts = new PartitionRangesForPartitionColumns();
       boolean noPartsForRange = false;
       for (TimeRange range : cubeql.getTimeRanges()) {
-        Set<FactPartition> rangeParts = getPartitions(cfact.fact, range, skipStorageCauses, missingPartitionRanges);
-        if (rangeParts == null || rangeParts.isEmpty()) {
-          LOG.info("No partitions for range:" + range);
+        StringBuilder extraWhereClause = new StringBuilder();
+        Set<FactPartition> rangeParts = getPartitions(cfact.fact, range, skipStorageCauses, missingParts);
+        // If no partitions were found, then we'll fallback.
+        String partcol = range.getPartitionColumn();
+        TimeRange prevRange = range;
+        String sep = "";
+        while (rangeParts.isEmpty()) {
+          // TODO: should we add a condition whether on range's partcol any missing partitions are not there
+          String timeDim = cubeql.getBaseCube().getTimeDimOfPartitionColumn(partcol);
+          if (!cfact.getColumns().contains(timeDim)) {
+            break;
+          }
+          TimeRange fallBackRange = getFallbackRange(prevRange, cfact, cubeql);
+          LOG.info("No partitions for range:" + range + ". fallback range: " + fallBackRange);
+          if (fallBackRange == null) {
+            break;
+          }
+          rangeParts = getPartitions(cfact.fact, fallBackRange, skipStorageCauses, missingParts);
+          extraWhereClause.append(sep)
+            .append(prevRange.toTimeDimWhereClause(cubeql.getAliasForTableName(cubeql.getCube()), timeDim));
+          sep = " AND ";
+          partcol = fallBackRange.getPartitionColumn();
+          prevRange = fallBackRange;
+          if (!rangeParts.isEmpty()) {
+            break;
+          }
+        }
+        if (rangeParts.isEmpty()) {
+          LOG.info("No partitions for fallback range:" + range);
           noPartsForRange = true;
           continue;
         }
         cfact.incrementPartsQueried(rangeParts.size());
         answeringParts.addAll(rangeParts);
         cfact.getPartsQueried().addAll(rangeParts);
-        cfact.getRangeToWhereClause().put(range, rangeWriter.getTimeRangeWhereClause(cubeql,
-          cubeql.getAliasForTabName(cubeql.getCube().getName()), rangeParts));
-      }
-      Set<String> nonExistingParts = Sets.newHashSet();
-      if (!missingPartitionRanges.isEmpty()) {
-        for (UpdatePeriod period : missingPartitionRanges.keySet()) {
-          for (TimePartitionRange range : missingPartitionRanges.get(period).getRanges()) {
-            nonExistingParts.add(range.toString());
-          }
-        }
+        String rangeWhereClause = rangeWriter.getTimeRangeWhereClause(cubeql,
+          cubeql.getAliasForTableName(cubeql.getCube().getName()), rangeParts);
+        cfact.getRangeToWhereClause().put(range, joinWithAnd(rangeWhereClause, extraWhereClause.toString()));
       }
+      Set<String> nonExistingParts = missingParts.toSet();
       if (!nonExistingParts.isEmpty()) {
         addNonExistingParts(cfact.fact.getName(), nonExistingParts);
       }
@@ -391,10 +451,10 @@ class StorageTableResolver implements ContextRewriter {
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range,
     HashMap<String, SkipStorageCause> skipStorageCauses,
-    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts) throws SemanticException {
+    PartitionRangesForPartitionColumns missingPartitions) throws SemanticException {
     try {
       return getPartitions(fact, range, getValidUpdatePeriods(fact), true, skipStorageCauses,
-        nonExistingParts);
+        missingPartitions);
     } catch (Exception e) {
       throw new SemanticException(e);
     }
@@ -402,12 +462,12 @@ class StorageTableResolver implements ContextRewriter {
 
   private Set<FactPartition> getPartitions(CubeFactTable fact, TimeRange range, TreeSet<UpdatePeriod> updatePeriods,
     boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses,
-    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts)
+    PartitionRangesForPartitionColumns missingPartitions)
     throws Exception {
     Set<FactPartition> partitions = new TreeSet<FactPartition>();
-    if (range.isCoverableBy(updatePeriods)
+    if (range != null && range.isCoverableBy(updatePeriods)
       && getPartitions(fact, range.getFromDate(), range.getToDate(), range.getPartitionColumn(), partitions,
-        updatePeriods, addNonExistingParts, skipStorageCauses, nonExistingParts)) {
+        updatePeriods, addNonExistingParts, skipStorageCauses, missingPartitions)) {
       return partitions;
     } else {
       return new TreeSet<FactPartition>();
@@ -417,7 +477,7 @@ class StorageTableResolver implements ContextRewriter {
   private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate, String partCol,
     Set<FactPartition> partitions, TreeSet<UpdatePeriod> updatePeriods,
     boolean addNonExistingParts, Map<String, SkipStorageCause> skipStorageCauses,
-    Map<UpdatePeriod, RangesPartitionTimeline> nonExistingParts)
+    PartitionRangesForPartitionColumns missingPartitions)
     throws Exception {
     LOG.info("getPartitions for " + fact + " from fromDate:" + fromDate + " toDate:" + toDate);
     if (fromDate.equals(toDate) || fromDate.after(toDate)) {
@@ -503,7 +563,7 @@ class StorageTableResolver implements ContextRewriter {
                 LOG.info("Looking for process time partitions between " + pdt + " and " + nextPdt);
                 Set<FactPartition> processTimeParts =
                   getPartitions(fact, TimeRange.getBuilder().fromDate(pdt).toDate(nextPdt).partitionColumn(
-                    processTimePartCol).build(), newset, false, skipStorageCauses, nonExistingParts);
+                    processTimePartCol).build(), newset, false, skipStorageCauses, missingPartitions);
                 LOG.info("Look ahead partitions: " + processTimeParts);
                 TimeRange timeRange = TimeRange.getBuilder().fromDate(dt).toDate(nextDt).build();
                 for (FactPartition pPart : processTimeParts) {
@@ -513,10 +573,11 @@ class StorageTableResolver implements ContextRewriter {
                     partWhereClauseFormat));
                   }
                   LOG.info("added all sub partitions blindly in pPart: " + pPart);
-//                          if (!getPartitions(fact, dt, cal.getTime(), partCol, pPart, partitions, newset, false,
-//                            skipStorageCauses, nonExistingParts)) {
-//                            LOG.info("No partitions found in look ahead range");
-//                          }
+                  //                          if (!getPartitions(fact, dt, cal.getTime(), partCol, pPart, partitions,
+                  // newset, false,
+                  //                            skipStorageCauses, nonExistingParts)) {
+                  //                            LOG.info("No partitions found in look ahead range");
+                  //                          }
                 }
               }
             }
@@ -528,15 +589,12 @@ class StorageTableResolver implements ContextRewriter {
         newset.addAll(updatePeriods);
         newset.remove(interval);
         if (!getPartitions(fact, dt, nextDt, partCol, partitions, newset, false, skipStorageCauses,
-          nonExistingParts)) {
+          missingPartitions)) {
 
           LOG.info("Adding non existing partition" + part);
           if (addNonExistingParts) {
             // Add non existing partitions for all cases of whether we populate all non existing or not.
-            if (!nonExistingParts.containsKey(part.getPeriod())) {
-              nonExistingParts.put(part.getPeriod(), new RangesPartitionTimeline(null, null, null));
-            }
-            nonExistingParts.get(part.getPeriod()).add(TimePartition.of(part.getPeriod(), dt));
+            missingPartitions.add(part);
             if (!failOnPartialData) {
               partitions.add(part);
               // add all storage tables as the answering tables
@@ -552,9 +610,9 @@ class StorageTableResolver implements ContextRewriter {
       }
     }
     return getPartitions(fact, fromDate, ceilFromDate, partCol, partitions,
-      updatePeriods, addNonExistingParts, skipStorageCauses, nonExistingParts)
+      updatePeriods, addNonExistingParts, skipStorageCauses, missingPartitions)
       && getPartitions(fact, floorToDate, toDate, partCol, partitions,
-        updatePeriods, addNonExistingParts, skipStorageCauses, nonExistingParts);
+        updatePeriods, addNonExistingParts, skipStorageCauses, missingPartitions);
   }
 
   private void updateFactPartitionStorageTablesFrom(CubeFactTable fact, FactPartition part,

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
index 612cdf9..c9c9cc6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRange.java
@@ -24,6 +24,7 @@ import java.util.TreeSet;
 
 import org.apache.lens.cube.metadata.UpdatePeriod;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -130,6 +131,21 @@ public class TimeRange {
     }
   }
 
+  public String toTimeDimWhereClause() {
+    return toTimeDimWhereClause(null, partitionColumn);
+  }
+
+  public String toTimeDimWhereClause(String prefix, String column) {
+    if (StringUtils.isNotBlank(column)) {
+      column = prefix + "." + column;
+    }
+    return new StringBuilder()
+      .append(column).append(" >= '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(fromDate)).append("'")
+      .append(" AND ")
+      .append(column).append(" < '").append(DateUtil.HIVE_QUERY_DATE_PARSER.get().format(toDate)).append("'")
+      .toString();
+  }
+
   @Override
   public String toString() {
     return partitionColumn + " [" + fromDate + ":" + toDate + "]";

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 1a30228..e5e7c56 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -136,7 +136,6 @@ class TimerangeResolver implements ContextRewriter {
 
     TimeRange range = builder.build();
     range.validate();
-
     cubeql.getTimeRanges().add(range);
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 3203354..1fe4173 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -39,7 +39,6 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.mapred.TextInputFormat;
@@ -108,6 +107,7 @@ public class CubeTestSetup {
   // Time Ranges
   public static final String LAST_HOUR_TIME_RANGE;
   public static final String TWO_DAYS_RANGE;
+  public static final String TWO_DAYS_RANGE_TTD;
   public static final String THIS_YEAR_RANGE;
   public static final String TWO_MONTHS_RANGE_UPTO_MONTH;
   public static final String TWO_MONTHS_RANGE_UPTO_HOURS;
@@ -157,17 +157,19 @@ public class CubeTestSetup {
     THIS_YEAR_START = DateUtils.truncate(NOW, 
UpdatePeriod.YEARLY.calendarField());
     THIS_YEAR_END = DateUtils.addYears(THIS_YEAR_START, 1);
     TWO_DAYS_RANGE_BEFORE_4_DAYS =
-      "time_range_in(dt, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
+      "time_range_in(d_time, '" + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_START) + "','"
         + CubeTestSetup.getDateUptoHours(BEFORE_4_DAYS_END) + "')";
 
 
-    TWO_DAYS_RANGE = "time_range_in(dt, '" + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(NOW) + "')";
+    TWO_DAYS_RANGE = "time_range_in(d_time, '" + getDateUptoHours(TWODAYS_BACK) + "','" + getDateUptoHours(NOW) + "')";
+    TWO_DAYS_RANGE_TTD = "time_range_in(test_time_dim, '" + getDateUptoHours(TWODAYS_BACK) + "','"
+      + getDateUptoHours(NOW) + "')";
     THIS_YEAR_RANGE =
-      "time_range_in(dt, '" + getDateUptoHours(THIS_YEAR_START) + "','" + getDateUptoHours(THIS_YEAR_END) + "')";
+      "time_range_in(d_time, '" + getDateUptoHours(THIS_YEAR_START) + "','" + getDateUptoHours(THIS_YEAR_END) + "')";
     TWO_MONTHS_RANGE_UPTO_MONTH =
-      "time_range_in(dt, '" + getDateUptoMonth(TWO_MONTHS_BACK) + "','" + getDateUptoMonth(NOW) + "')";
+      "time_range_in(d_time, '" + getDateUptoMonth(TWO_MONTHS_BACK) + "','" + getDateUptoMonth(NOW) + "')";
     TWO_MONTHS_RANGE_UPTO_HOURS =
-      "time_range_in(dt, '" + getDateUptoHours(TWO_MONTHS_BACK) + "','" + getDateUptoHours(NOW) + "')";
+      "time_range_in(d_time, '" + getDateUptoHours(TWO_MONTHS_BACK) + "','" + getDateUptoHours(NOW) + "')";
 
     // calculate LAST_HOUR_TIME_RANGE
     LAST_HOUR_TIME_RANGE = getTimeRangeString(getDateUptoHours(LAST_HOUR), 
getDateUptoHours(NOW));
@@ -448,23 +450,6 @@ public class CubeTestSetup {
     return getExpectedQuery(dimName, selExpr, null, null, postWhereExpr, 
storageTable, hasPart);
   }
 
-  public static List<String> getNotLatestConditions(final String cubeName, 
final String timePart,
-    final String storageTableName) throws SemanticException {
-    return new ArrayList<String>() {
-      {
-        try {
-          for (FieldSchema fs : 
Hive.get().getTable(storageTableName).getPartitionKeys()) {
-            if (!fs.getName().equals(timePart)) {
-              add(cubeName + "." + fs.getName() + " != '" + 
StorageConstants.LATEST_PARTITION_VALUE + "'");
-            }
-          }
-        } catch (HiveException e) {
-          throw new SemanticException(e);
-        }
-      }
-    };
-  }
-
   public static String getExpectedQuery(String dimName, String selExpr, String 
joinExpr, String whereExpr,
     String postWhereExpr, String storageTable, boolean hasPart) {
     StringBuilder expected = new StringBuilder();
@@ -516,6 +501,8 @@ public class CubeTestSetup {
       "New measure", null, null, null, NOW, null, 100.0));
 
     cubeDimensions = new HashSet<CubeDimAttribute>();
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("d_time", 
"timestamp", "d time")));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema("processing_time", 
"timestamp", "processing time")));
     List<CubeDimAttribute> locationHierarchy = new 
ArrayList<CubeDimAttribute>();
     locationHierarchy.add(new ReferencedDimAtrribute(new 
FieldSchema("zipcode", "int", "zip"), "Zip refer",
       new TableReference("zipdim", "code")));
@@ -593,13 +580,14 @@ public class CubeTestSetup {
 
     Map<String, String> cubeProperties = new HashMap<String, String>();
     
cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(TEST_CUBE_NAME),
-      "dt,pt,it,et,test_time_dim,test_time_dim2");
+      "d_time,pt,it,et,test_time_dim,test_time_dim2");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"test_time_dim", "ttd");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"test_time_dim2", "ttd2");
-    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "dt", 
"dt");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"d_time", "dt");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "it", 
"it");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "et", 
"et");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "pt", 
"pt");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "d_time", 
"test_time_dim+[-10 days,10 days]");
 
     client.createCube(TEST_CUBE_NAME, cubeMeasures, cubeDimensions, exprs, 
joinchains, cubeProperties);
 
@@ -631,13 +619,15 @@ public class CubeTestSetup {
 
     Map<String, String> cubeProperties = new HashMap<String, String>();
     
cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
-      "dt,pt,it,et,test_time_dim,test_time_dim2");
+      "d_time,pt,it,et,test_time_dim,test_time_dim2");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"test_time_dim", "ttd");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"test_time_dim2", "ttd2");
-    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "dt", 
"dt");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"d_time", "dt");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "it", 
"it");
     cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "et", 
"et");
-    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + "pt", 
"pt");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_TO_PART_MAPPING_PFX + 
"processing_time", "pt");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + "d_time", 
"processing_time+[-5 days,5 days]");
+    cubeProperties.put(MetastoreConstants.TIMEDIM_RELATION + 
"processing_time", "test_time_dim+[-5 days,5 days]");
     cubeProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, "false");
 
     Set<JoinChain> joinchains = new HashSet<JoinChain>() {
@@ -803,6 +793,8 @@ public class CubeTestSetup {
     }
 
     // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
     factColumns.add(new FieldSchema("zipcode", "int", "zip"));
     factColumns.add(new FieldSchema("cityid", "int", "city id"));
     factColumns.add(new FieldSchema("stateid", "int", "city id"));
@@ -819,6 +811,8 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("msr12", "float", "second measure"));
 
     // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
     factColumns.add(new FieldSchema("dim2", "int", "dim2 id"));
@@ -833,6 +827,8 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 
     // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
 
@@ -846,6 +842,8 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("msr12", "float", "second measure"));
 
     // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
     factColumns.add(new FieldSchema("dim12", "string", "base dim"));
@@ -872,6 +870,8 @@ public class CubeTestSetup {
     factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
 
     // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
     factColumns.add(new FieldSchema("dim1", "string", "base dim"));
     factColumns.add(new FieldSchema("dim11", "string", "base dim"));
     factColumns.add(new FieldSchema("dim12", "string", "base dim"));
@@ -1299,7 +1299,7 @@ public class CubeTestSetup {
   }
 
   // DimWithTwoStorages
-  private void createCityTbale(CubeMetastoreClient client) throws 
HiveException, ParseException {
+  private void createCityTable(CubeMetastoreClient client) throws 
HiveException, ParseException {
     Set<CubeDimAttribute> cityAttrs = new HashSet<CubeDimAttribute>();
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("id", "int", "code")));
     cityAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", "city 
name")));
@@ -1880,7 +1880,7 @@ public class CubeTestSetup {
       createCubeFactOnlyHourly(client);
       createCubeFactOnlyHourlyRaw(client);
 
-      createCityTbale(client);
+      createCityTable(client);
       // For join resolver test
       createTestDim2(client);
       createTestDim3(client);
@@ -2122,6 +2122,6 @@ public class CubeTestSetup {
 
 
   private static String getTimeRangeString(final String startDate, final 
String endDate) {
-    return "time_range_in(dt, '" + startDate + "','" + endDate + "')";
+    return "time_range_in(d_time, '" + startDate + "','" + endDate + "')";
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/dc1fafa9/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
index 37c578b..4f61671 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestAggregateResolver.java
@@ -32,8 +32,11 @@ import org.testng.Assert;
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;
 
+import lombok.Getter;
+
 public class TestAggregateResolver extends TestQueryRewrite {
 
+  @Getter
   private Configuration conf;
   private final String cubeName = CubeTestSetup.TEST_CUBE_NAME;
 
@@ -45,14 +48,6 @@ public class TestAggregateResolver extends TestQueryRewrite {
     conf.setBoolean(CubeQueryConfUtil.ENABLE_SELECT_TO_GROUPBY, true);
     conf.setBoolean(CubeQueryConfUtil.ENABLE_GROUP_BY_TO_SELECT, true);
   }
-  private Configuration getConf() {
-    return new Configuration(conf);
-  }
-  private Configuration getConf(String storages) {
-    Configuration conf = getConf();
-    conf.set(CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, storages);
-    return conf;
-  }
 
   private CubeQueryContext rewrittenQuery;
 
@@ -148,7 +143,7 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
       compareQueries(expected[i], hql);
     }
     aggregateFactSelectionTests(conf);
-    rawFactSelectionTests(getConf("C1,C2"));
+    rawFactSelectionTests(getConfWithStorages("C1,C2"));
   }
 
   @Test
@@ -202,7 +197,7 @@ public class TestAggregateResolver extends TestQueryRewrite 
{
 
   @Test
   public void testAggregateResolverOff() throws SemanticException, 
ParseException, LensException {
-    Configuration conf2 = getConf("C1,C2");
+    Configuration conf2 = getConfWithStorages("C1,C2");
     conf2.setBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER, true);
 
     // Test if raw fact is selected for query with no aggregate function on a

Reply via email to