Repository: lens
Updated Branches:
  refs/heads/master 017c40310 -> 934f84573


LENS-197 : Allow start and end times for columns in fact tables


Project: http://git-wip-us.apache.org/repos/asf/lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/lens/commit/934f8457
Tree: http://git-wip-us.apache.org/repos/asf/lens/tree/934f8457
Diff: http://git-wip-us.apache.org/repos/asf/lens/diff/934f8457

Branch: refs/heads/master
Commit: 934f845737558349af00ce4de6ffcb16c6fa904b
Parents: 017c403
Author: Sushil Mohanty <[email protected]>
Authored: Mon Aug 8 10:04:10 2016 +0530
Committer: Amareshwari Sriramadasu <[email protected]>
Committed: Mon Aug 8 10:04:10 2016 +0530

----------------------------------------------------------------------
 lens-api/src/main/resources/cube-0.1.xsd        |   2 +
 .../lens/cube/metadata/MetastoreConstants.java  |   2 +
 .../lens/cube/parse/CandidateTableResolver.java |  83 +++++-
 .../lens/cube/parse/CubeQueryRewriter.java      |   6 +-
 .../lens/cube/parse/TimeRangeChecker.java       | 258 +++++++++++++++++++
 .../lens/cube/parse/TimerangeResolver.java      | 124 ---------
 .../apache/lens/cube/parse/CubeTestSetup.java   |  45 +++-
 .../lens/cube/parse/TestBaseCubeQueries.java    |   2 +-
 .../lens/cube/parse/TestCubeRewriter.java       |  20 ++
 .../lens/cube/parse/TestQueryMetrics.java       |  31 +--
 lens-examples/src/main/resources/fact1.xml      |   2 +
 .../src/test/resources/yaml/fact1.yaml          |   6 +
 .../metastore/CubeMetastoreServiceImpl.java     |   8 +-
 .../apache/lens/server/metastore/JAXBUtils.java |  16 ++
 .../server/metastore/TestMetastoreService.java  |  12 +
 15 files changed, 459 insertions(+), 158 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-api/src/main/resources/cube-0.1.xsd
----------------------------------------------------------------------
diff --git a/lens-api/src/main/resources/cube-0.1.xsd 
b/lens-api/src/main/resources/cube-0.1.xsd
index d6c6bd2..f438f48 100644
--- a/lens-api/src/main/resources/cube-0.1.xsd
+++ b/lens-api/src/main/resources/cube-0.1.xsd
@@ -711,6 +711,8 @@
       </xs:annotation>
     </xs:attribute>
     <xs:attribute name="comment" type="xs:string"/>
+    <xs:attribute name="start_time" type="xs:string"/>
+    <xs:attribute name="end_time" type="xs:string"/>
   </xs:complexType>
 
   <xs:simpleType name="x_measure_type">

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
index 61675bc..4585ef7 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/metadata/MetastoreConstants.java
@@ -55,6 +55,8 @@ public final class MetastoreConstants {
   public static final String FACT_RELATIVE_START_TIME = 
"cube.fact.relative.start.time";
   public static final String FACT_ABSOLUTE_END_TIME = 
"cube.fact.absolute.end.time";
   public static final String FACT_RELATIVE_END_TIME = 
"cube.fact.relative.end.time";
+  public static final String FACT_COL_START_TIME_PFX = 
"cube.fact.col.start.time.";
+  public static final String FACT_COL_END_TIME_PFX = "cube.fact.col.end.time.";
 
   // Segmentation constants
   public static final String SEGMENTATION_KEY_PFX = 
"cube.segmentation.internal.";

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
index 12d6e3a..83e5088 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CandidateTableResolver.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.collect.Sets;
 
+import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -175,6 +176,58 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
+  public static boolean isColumnAvailableInRange(final TimeRange range, Date 
startTime, Date endTime) {
+    return (isColumnAvailableFrom(range.getFromDate(), startTime)
+        && isColumnAvailableTill(range.getToDate(), endTime));
+  }
+
+  public static boolean isColumnAvailableFrom(@NonNull final Date date, Date 
startTime) {
+    return (startTime == null) ? true : date.equals(startTime) || 
date.after(startTime);
+  }
+
+  public static boolean isColumnAvailableTill(@NonNull final Date date, Date 
endTime) {
+    return (endTime == null) ? true : date.equals(endTime) || 
date.before(endTime);
+  }
+
+  public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, 
CandidateTable cfact, String col) {
+    for(TimeRange range : cubeql.getTimeRanges()) {
+      if (!isColumnAvailableInRange(range, getFactColumnStartTime(cfact, col), 
getFactColumnEndTime(cfact, col))) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  public static Date getFactColumnStartTime(CandidateTable table, String 
factCol) {
+    Date startTime = null;
+    if (table instanceof CandidateFact) {
+      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) 
{
+        if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
+          String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_START_TIME_PFX);
+          if (factCol.equals(propCol)) {
+            startTime = ((CandidateFact) table).fact.getDateFromProperty(key, 
false, true);
+          }
+        }
+      }
+    }
+    return startTime;
+  }
+
+  public static Date getFactColumnEndTime(CandidateTable table, String 
factCol) {
+    Date endTime = null;
+    if (table instanceof CandidateFact) {
+      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) 
{
+        if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
+          String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_END_TIME_PFX);
+          if (factCol.equals(propCol)) {
+            endTime = ((CandidateFact) table).fact.getDateFromProperty(key, 
false, true);
+          }
+        }
+      }
+    }
+    return endTime;
+  }
+
   private void resolveCandidateFactTables(CubeQueryContext cubeql) throws 
LensException {
     if (cubeql.getCube() != null) {
       String str = 
cubeql.getConf().get(CubeQueryConfUtil.getValidFactTablesKey(cubeql.getCube().getName()));
@@ -196,7 +249,6 @@ class CandidateTableResolver implements ContextRewriter {
             continue;
           }
         }
-
         // go over the columns accessed in the query and find out which tables
         // can answer the query
         // the candidate facts should have all the dimensions queried and
@@ -212,14 +264,16 @@ class CandidateTableResolver implements ContextRewriter {
               toRemove = true;
               break;
             }
+          } else if (!isFactColumnValidForRange(cubeql, cfact, col)) {
+            toRemove = true;
+            break;
           }
         }
-
         // go over join chains and prune facts that dont have any of the 
columns in each chain
         for (JoinChain chain : cubeql.getJoinchains().values()) {
           OptionalDimCtx optdim = 
cubeql.getOptionalDimensionMap().get(Aliased.create((Dimension)cubeql.getCubeTbls()
             .get(chain.getName()), chain.getName()));
-          if (!checkForColumnExists(cfact, chain.getSourceColumns())) {
+          if (!checkForFactColumnExistsAndValidForRange(cfact, 
chain.getSourceColumns(), cubeql)) {
             // check if chain is optional or not
             if (optdim == null) {
               log.info("Not considering fact table:{} as columns {} are not 
available", cfact,
@@ -247,7 +301,7 @@ class CandidateTableResolver implements ContextRewriter {
         // check if the candidate fact has atleast one measure queried
         // if expression has measures, they should be considered along with 
other measures and see if the fact can be
         // part of measure covering set
-        if (!checkForColumnExists(cfact, queriedMsrs)
+        if (!checkForFactColumnExistsAndValidForRange(cfact, queriedMsrs, 
cubeql)
           && (cubeql.getQueriedExprsWithMeasures().isEmpty()
             || 
cubeql.getExprCtx().allNotEvaluable(cubeql.getQueriedExprsWithMeasures(), 
cfact))) {
           log.info("Not considering fact table:{} as columns {},{} is not 
available", cfact, queriedMsrs,
@@ -308,7 +362,7 @@ class CandidateTableResolver implements ContextRewriter {
       CandidateFact cfact = i.next();
       i.remove();
       // cfact does not contain any of msrs and none of exprsWithMeasures are 
evaluable.
-      if ((msrs.isEmpty() || !checkForColumnExists(cfact, msrs))
+      if ((msrs.isEmpty() || !checkForFactColumnExistsAndValidForRange(cfact, 
msrs, cubeql))
         && (exprsWithMeasures.isEmpty() || 
cubeql.getExprCtx().allNotEvaluable(exprsWithMeasures, cfact))) {
         // ignore the fact
         continue;
@@ -365,7 +419,7 @@ class CandidateTableResolver implements ContextRewriter {
             OptionalDimCtx optdim = 
cubeql.getOptionalDimensionMap().get(reachableDim);
             Collection<String> colSet = joincolumnsEntry.getValue().get(dim);
 
-            if (!checkForColumnExists(cdim, colSet)) {
+            if (!checkForFactColumnExistsAndValidForRange(cdim, colSet, 
cubeql)) {
               if (optdim == null || optdim.isRequiredInJoinChain
                 || (optdim != null && 
optdim.requiredForCandidates.contains(cdim))) {
                 i.remove();
@@ -385,7 +439,7 @@ class CandidateTableResolver implements ContextRewriter {
               OptionalDimCtx optdim = 
cubeql.getOptionalDimensionMap().get(reachableDim);
               Collection<String> colSet = joincolumnsEntry.getValue().get(dim);
 
-              if (!checkForColumnExists(cdim, colSet)) {
+              if (!checkForFactColumnExistsAndValidForRange(cdim, colSet, 
cubeql)) {
                 if (optdim == null || optdim.isRequiredInJoinChain
                   || (optdim != null && 
optdim.requiredForCandidates.contains(cdim))) {
                   i.remove();
@@ -401,7 +455,8 @@ class CandidateTableResolver implements ContextRewriter {
           if (!removed) {
             // go over the referenced columns accessed in the query and find 
out which tables can participate
             if (cubeql.getOptionalDimensionMap().get(aliasedDim) != null
-              && !checkForColumnExists(cdim, 
cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried)) {
+              && !checkForFactColumnExistsAndValidForRange(cdim,
+                cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried, 
cubeql)) {
               i.remove();
               log.info("Not considering optional dimtable:{} as its denorm 
fields do not exist. Denorm fields:{}",
                 dimtable, 
cubeql.getOptionalDimensionMap().get(aliasedDim).colQueried);
@@ -445,7 +500,7 @@ class CandidateTableResolver implements ContextRewriter {
           OptionalDimCtx optdim = 
cubeql.getOptionalDimensionMap().get(reachableDim);
           colSet = joincolumnsEntry.getValue().get(cubeql.getCube());
 
-          if (!checkForColumnExists(cfact, colSet)) {
+          if (!checkForFactColumnExistsAndValidForRange(cfact, colSet, 
cubeql)) {
             if (optdim == null || optdim.isRequiredInJoinChain
               || (optdim != null && 
optdim.requiredForCandidates.contains(cfact))) {
               i.remove();
@@ -497,7 +552,7 @@ class CandidateTableResolver implements ContextRewriter {
           remove = true;
         } else {
           List<String> colSet = 
cubeql.getAutoJoinCtx().getJoinPathFromColumns().get(dim).get(candidate.getBaseTable());
-          if (!checkForColumnExists(candidate, colSet)) {
+          if (!checkForFactColumnExistsAndValidForRange(candidate, colSet, 
cubeql)) {
             log.info("Removing candidate {} from requiredForCandidates of {}, 
as columns:{} do not exist", candidate,
               dim, colSet);
             remove = true;
@@ -662,13 +717,15 @@ class CandidateTableResolver implements ContextRewriter {
     }
   }
 
-  // The candidate table contains atleast one column in the colSet
-  static boolean checkForColumnExists(CandidateTable table, Collection<String> 
colSet) {
+  // The candidate table contains at least one column in the colSet and
+  // the column can be queried in the range specified
+  static boolean checkForFactColumnExistsAndValidForRange(CandidateTable 
table, Collection<String> colSet,
+                                                          CubeQueryContext 
cubeql) {
     if (colSet == null || colSet.isEmpty()) {
       return true;
     }
     for (String column : colSet) {
-      if (table.getColumns().contains(column)) {
+      if (table.getColumns().contains(column) &&  
isFactColumnValidForRange(cubeql, table, column)) {
         return true;
       }
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index c1fd0a5..b612173 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -145,6 +145,8 @@ public class CubeQueryRewriter {
     rewriters.add(exprResolver);
     // De-normalized columns resolved
     rewriters.add(denormResolver);
+    // Resolve time ranges
+    rewriters.add(new TimerangeResolver(conf));
     // Resolve candidate fact tables and dimension tables for columns queried
     rewriters.add(candidateTblResolver);
     // Resolve aggregations and generate base select tree
@@ -153,8 +155,8 @@ public class CubeQueryRewriter {
     rewriters.add(new FieldValidator());
     // Resolve joins and generate base join tree
     rewriters.add(new JoinResolver(conf));
-    // resolve time ranges and do col life validation
-    rewriters.add(new TimerangeResolver(conf));
+    // Do col life validation
+    rewriters.add(new TimeRangeChecker(conf));
     // Resolve candidate fact tables and dimension tables for columns included
     // in join and denorm resolvers
     rewriters.add(candidateTblResolver);

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
new file mode 100644
index 0000000..ca176ee
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
@@ -0,0 +1,258 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
+
+import java.util.*;
+
+import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
+import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
+import org.apache.lens.cube.error.LensCubeErrorCode;
+import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.join.JoinPath;
+import org.apache.lens.cube.parse.join.AutoJoinContext;
+import org.apache.lens.server.api.LensConfConstants;
+import org.apache.lens.server.api.error.LensException;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+
+import com.google.common.collect.Lists;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class TimeRangeChecker implements ContextRewriter {
+  public TimeRangeChecker(Configuration conf) {
+  }
+  @Override
+  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
+    if (cubeql.getCube() == null) {
+      return;
+    }
+    doColLifeValidation(cubeql);
+    doFactRangeValidation(cubeql);
+  }
+  private void extractTimeRange(CubeQueryContext cubeql) throws LensException {
+    // get time range -
+    // Time range should be direct child of where condition
+    // TOK_WHERE.TOK_FUNCTION.Identifier Or, it should be right hand child of
+    // AND condition TOK_WHERE.KW_AND.TOK_FUNCTION.Identifier
+    if (cubeql.getWhereAST() == null || cubeql.getWhereAST().getChildCount() < 
1) {
+      throw new 
LensException(LensCubeErrorCode.NO_TIMERANGE_FILTER.getLensErrorInfo());
+    }
+    searchTimeRanges(cubeql.getWhereAST(), cubeql, null, 0);
+  }
+
+  private void searchTimeRanges(ASTNode root, CubeQueryContext cubeql, ASTNode 
parent, int childIndex)
+    throws LensException {
+    if (root == null) {
+      return;
+    } else if (root.getToken().getType() == TOK_FUNCTION) {
+      ASTNode fname = HQLParser.findNodeByPath(root, Identifier);
+      if (fname != null && 
CubeQueryContext.TIME_RANGE_FUNC.equalsIgnoreCase(fname.getText())) {
+        processTimeRangeFunction(cubeql, root, parent, childIndex);
+      }
+    } else {
+      for (int i = 0; i < root.getChildCount(); i++) {
+        ASTNode child = (ASTNode) root.getChild(i);
+        searchTimeRanges(child, cubeql, root, i);
+      }
+    }
+  }
+
+  private String getColumnName(ASTNode node) {
+    String column = null;
+    if (node.getToken().getType() == DOT) {
+      ASTNode colIdent = (ASTNode) node.getChild(1);
+      column = colIdent.getText().toLowerCase();
+    } else if (node.getToken().getType() == TOK_TABLE_OR_COL) {
+      // Take the child identifier's text
+      ASTNode ident = (ASTNode) node.getChild(0);
+      column = ident.getText().toLowerCase();
+    }
+    return column;
+  }
+
+  private void processTimeRangeFunction(CubeQueryContext cubeql, ASTNode 
timenode, ASTNode parent, int childIndex)
+    throws LensException {
+    TimeRange.TimeRangeBuilder builder = TimeRange.getBuilder();
+    builder.astNode(timenode);
+    builder.parent(parent);
+    builder.childIndex(childIndex);
+
+    String timeDimName = getColumnName((ASTNode) timenode.getChild(1));
+
+    if (!cubeql.getCube().getTimedDimensions().contains(timeDimName)) {
+      throw new 
LensException(LensCubeErrorCode.NOT_A_TIMED_DIMENSION.getLensErrorInfo(), 
timeDimName);
+    }
+    // Replace timeDimName with the column that is used for partitioning.
+    // Assume the same column
+    // is used as a partition column in all storages of the fact
+    timeDimName = cubeql.getPartitionColumnOfTimeDim(timeDimName);
+    builder.partitionColumn(timeDimName);
+
+    String fromDateRaw = PlanUtils.stripQuotes(timenode.getChild(2).getText());
+    String toDateRaw = null;
+    if (timenode.getChildCount() > 3) {
+      ASTNode toDateNode = (ASTNode) timenode.getChild(3);
+      if (toDateNode != null) {
+        toDateRaw = PlanUtils.stripQuotes(timenode.getChild(3).getText());
+      }
+    }
+    long currentTime = 
cubeql.getConf().getLong(LensConfConstants.QUERY_CURRENT_TIME_IN_MILLIS, 0);
+    Date now;
+    if (currentTime != 0) {
+      now = new Date(currentTime);
+    } else {
+      now = new Date();
+    }
+    builder.fromDate(DateUtil.resolveDate(fromDateRaw, now));
+    if (StringUtils.isNotBlank(toDateRaw)) {
+      builder.toDate(DateUtil.resolveDate(toDateRaw, now));
+    } else {
+      builder.toDate(now);
+    }
+
+    TimeRange range = builder.build();
+    range.validate();
+    cubeql.getTimeRanges().add(range);
+  }
+
+  private void doColLifeValidation(CubeQueryContext cubeql) throws 
LensException,
+      ColUnAvailableInTimeRangeException {
+    Set<String> cubeColumns = 
cubeql.getColumnsQueried(cubeql.getCube().getName());
+    if (cubeColumns == null || cubeColumns.isEmpty()) {
+      // Query doesn't have any columns from cube
+      return;
+    }
+
+    for (String col : cubeql.getColumnsQueried(cubeql.getCube().getName())) {
+      CubeColumn column = cubeql.getCube().getColumnByName(col);
+      for (TimeRange range : cubeql.getTimeRanges()) {
+        if (column == null) {
+          if (!cubeql.getCube().getTimedDimensions().contains(col)) {
+            throw new 
LensException(LensCubeErrorCode.NOT_A_CUBE_COLUMN.getLensErrorInfo(), col);
+          }
+          continue;
+        }
+        if (!column.isColumnAvailableInTimeRange(range)) {
+          throwException(column);
+        }
+      }
+    }
+
+    // Look at referenced columns through denormalization resolver
+    // and do column life validation
+    Map<String, Set<DenormalizationResolver.ReferencedQueriedColumn>> refCols =
+        cubeql.getDeNormCtx().getReferencedCols();
+    for (String col : refCols.keySet()) {
+      Iterator<DenormalizationResolver.ReferencedQueriedColumn> refColIter = 
refCols.get(col).iterator();
+      while (refColIter.hasNext()) {
+        DenormalizationResolver.ReferencedQueriedColumn refCol = 
refColIter.next();
+        for (TimeRange range : cubeql.getTimeRanges()) {
+          if (!refCol.col.isColumnAvailableInTimeRange(range)) {
+            log.debug("The refernced column: {} is not in the range queried", 
refCol.col.getName());
+            refColIter.remove();
+            break;
+          }
+        }
+      }
+    }
+
+    // Remove join paths that have columns with invalid life span
+    AutoJoinContext joinContext = cubeql.getAutoJoinCtx();
+    if (joinContext == null) {
+      return;
+    }
+    // Get cube columns which are part of join chain
+    Set<String> joinColumns = 
joinContext.getAllJoinPathColumnsOfTable((AbstractCubeTable) cubeql.getCube());
+    if (joinColumns == null || joinColumns.isEmpty()) {
+      return;
+    }
+
+    // Loop over all cube columns part of join paths
+    for (String col : joinColumns) {
+      CubeColumn column = cubeql.getCube().getColumnByName(col);
+      for (TimeRange range : cubeql.getTimeRanges()) {
+        if (!column.isColumnAvailableInTimeRange(range)) {
+          log.info("Timerange queried is not in column life for {}, Removing 
join paths containing the column", column);
+          // Remove join paths containing this column
+          Map<Aliased<Dimension>, List<JoinPath>> allPaths = 
joinContext.getAllPaths();
+
+          for (Aliased<Dimension> dimension : allPaths.keySet()) {
+            List<JoinPath> joinPaths = allPaths.get(dimension);
+            Iterator<JoinPath> joinPathIterator = joinPaths.iterator();
+
+            while (joinPathIterator.hasNext()) {
+              JoinPath path = joinPathIterator.next();
+              if (path.containsColumnOfTable(col, (AbstractCubeTable) 
cubeql.getCube())) {
+                log.info("Removing join path: {} as columns :{} is not 
available in the range", path, col);
+                joinPathIterator.remove();
+                if (joinPaths.isEmpty()) {
+                  // This dimension doesn't have any paths left
+                  throw new 
LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
+                      "No valid join path available for dimension " + 
dimension + " which would satisfy time range "
+                          + range.getFromDate() + "-" + range.getToDate());
+                }
+              }
+            } // End loop to remove path
+
+          } // End loop for all paths
+        }
+      } // End time range loop
+    } // End column loop
+  }
+
+
+  private void throwException(CubeColumn column) throws 
ColUnAvailableInTimeRangeException {
+
+    final Long availabilityStartTime = 
(column.getStartTimeMillisSinceEpoch().isPresent())
+        ? column.getStartTimeMillisSinceEpoch().get() : null;
+
+    final Long availabilityEndTime = 
column.getEndTimeMillisSinceEpoch().isPresent()
+        ? column.getEndTimeMillisSinceEpoch().get() : null;
+
+    ColUnAvailableInTimeRange col = new 
ColUnAvailableInTimeRange(column.getName(), availabilityStartTime,
+        availabilityEndTime);
+
+    throw new ColUnAvailableInTimeRangeException(col);
+  }
+
+  private void doFactRangeValidation(CubeQueryContext cubeql) {
+    Iterator<CandidateFact> iter = cubeql.getCandidateFacts().iterator();
+    while (iter.hasNext()) {
+      CandidateFact cfact = iter.next();
+      List<TimeRange> invalidTimeRanges = Lists.newArrayList();
+      for (TimeRange timeRange : cubeql.getTimeRanges()) {
+        if (!cfact.isValidForTimeRange(timeRange)) {
+          invalidTimeRanges.add(timeRange);
+        }
+      }
+      if (!invalidTimeRanges.isEmpty()){
+        cubeql.addFactPruningMsgs(cfact.fact, 
CandidateTablePruneCause.factNotAvailableInRange(invalidTimeRanges));
+        log.info("Not considering {} as it's not available for time ranges: 
{}", cfact, invalidTimeRanges);
+        iter.remove();
+      }
+    }
+    
cubeql.pruneCandidateFactSet(CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE);
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
index 06ba148..8defdc3 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimerangeResolver.java
@@ -22,13 +22,8 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
 import java.util.*;
 
-import org.apache.lens.cube.error.ColUnAvailableInTimeRange;
-import org.apache.lens.cube.error.ColUnAvailableInTimeRangeException;
 import org.apache.lens.cube.error.LensCubeErrorCode;
 import org.apache.lens.cube.metadata.*;
-import org.apache.lens.cube.metadata.join.JoinPath;
-import 
org.apache.lens.cube.parse.DenormalizationResolver.ReferencedQueriedColumn;
-import org.apache.lens.cube.parse.join.AutoJoinContext;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 
@@ -37,7 +32,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 
-import com.google.common.collect.Lists;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -54,8 +48,6 @@ class TimerangeResolver implements ContextRewriter {
       return;
     }
     extractTimeRange(cubeql);
-    doColLifeValidation(cubeql);
-    doFactRangeValidation(cubeql);
   }
 
 
@@ -145,121 +137,5 @@ class TimerangeResolver implements ContextRewriter {
     cubeql.getTimeRanges().add(range);
   }
 
-  private void doColLifeValidation(CubeQueryContext cubeql) throws 
LensException,
-    ColUnAvailableInTimeRangeException {
-    Set<String> cubeColumns = 
cubeql.getColumnsQueried(cubeql.getCube().getName());
-    if (cubeColumns == null || cubeColumns.isEmpty()) {
-      // Query doesn't have any columns from cube
-      return;
-    }
-
-    for (String col : cubeql.getColumnsQueried(cubeql.getCube().getName())) {
-      CubeColumn column = cubeql.getCube().getColumnByName(col);
-      for (TimeRange range : cubeql.getTimeRanges()) {
-        if (column == null) {
-          if (!cubeql.getCube().getTimedDimensions().contains(col)) {
-            throw new 
LensException(LensCubeErrorCode.NOT_A_CUBE_COLUMN.getLensErrorInfo(), col);
-          }
-          continue;
-        }
-        if (!column.isColumnAvailableInTimeRange(range)) {
-          throwException(column);
-        }
-      }
-    }
-
-    // Look at referenced columns through denormalization resolver
-    // and do column life validation
-    Map<String, Set<ReferencedQueriedColumn>> refCols = 
cubeql.getDeNormCtx().getReferencedCols();
-    for (String col : refCols.keySet()) {
-      Iterator<ReferencedQueriedColumn> refColIter = 
refCols.get(col).iterator();
-      while (refColIter.hasNext()) {
-        ReferencedQueriedColumn refCol = refColIter.next();
-        for (TimeRange range : cubeql.getTimeRanges()) {
-          if (!refCol.col.isColumnAvailableInTimeRange(range)) {
-            log.debug("The refernced column: {} is not in the range queried", 
refCol.col.getName());
-            refColIter.remove();
-            break;
-          }
-        }
-      }
-    }
-
-    // Remove join paths that have columns with invalid life span
-    AutoJoinContext joinContext = cubeql.getAutoJoinCtx();
-    if (joinContext == null) {
-      return;
-    }
-    // Get cube columns which are part of join chain
-    Set<String> joinColumns = 
joinContext.getAllJoinPathColumnsOfTable((AbstractCubeTable) cubeql.getCube());
-    if (joinColumns == null || joinColumns.isEmpty()) {
-      return;
-    }
-
-    // Loop over all cube columns part of join paths
-    for (String col : joinColumns) {
-      CubeColumn column = cubeql.getCube().getColumnByName(col);
-      for (TimeRange range : cubeql.getTimeRanges()) {
-        if (!column.isColumnAvailableInTimeRange(range)) {
-          log.info("Timerange queried is not in column life for {}, Removing 
join paths containing the column", column);
-          // Remove join paths containing this column
-          Map<Aliased<Dimension>, List<JoinPath>> allPaths = 
joinContext.getAllPaths();
-
-          for (Aliased<Dimension> dimension : allPaths.keySet()) {
-            List<JoinPath> joinPaths = allPaths.get(dimension);
-            Iterator<JoinPath> joinPathIterator = joinPaths.iterator();
 
-            while (joinPathIterator.hasNext()) {
-              JoinPath path = joinPathIterator.next();
-              if (path.containsColumnOfTable(col, (AbstractCubeTable) 
cubeql.getCube())) {
-                log.info("Removing join path: {} as columns :{} is not 
available in the range", path, col);
-                joinPathIterator.remove();
-                if (joinPaths.isEmpty()) {
-                  // This dimension doesn't have any paths left
-                  throw new 
LensException(LensCubeErrorCode.NO_JOIN_PATH.getLensErrorInfo(),
-                      "No valid join path available for dimension " + 
dimension + " which would satisfy time range "
-                          + range.getFromDate() + "-" + range.getToDate());
-                }
-              }
-            } // End loop to remove path
-
-          } // End loop for all paths
-        }
-      } // End time range loop
-    } // End column loop
-  }
-
-
-  private void throwException(CubeColumn column) throws 
ColUnAvailableInTimeRangeException {
-
-    final Long availabilityStartTime = 
(column.getStartTimeMillisSinceEpoch().isPresent())
-      ? column.getStartTimeMillisSinceEpoch().get() : null;
-
-    final Long availabilityEndTime = 
column.getEndTimeMillisSinceEpoch().isPresent()
-      ? column.getEndTimeMillisSinceEpoch().get() : null;
-
-    ColUnAvailableInTimeRange col = new 
ColUnAvailableInTimeRange(column.getName(), availabilityStartTime,
-      availabilityEndTime);
-
-    throw new ColUnAvailableInTimeRangeException(col);
-  }
-
-  private void doFactRangeValidation(CubeQueryContext cubeql) {
-    Iterator<CandidateFact> iter = cubeql.getCandidateFacts().iterator();
-    while (iter.hasNext()) {
-      CandidateFact cfact = iter.next();
-      List<TimeRange> invalidTimeRanges = Lists.newArrayList();
-      for (TimeRange timeRange : cubeql.getTimeRanges()) {
-        if (!cfact.isValidForTimeRange(timeRange)) {
-          invalidTimeRanges.add(timeRange);
-        }
-      }
-      if (!invalidTimeRanges.isEmpty()){
-        cubeql.addFactPruningMsgs(cfact.fact, 
CandidateTablePruneCause.factNotAvailableInRange(invalidTimeRanges));
-        log.info("Not considering {} as it's not available for time ranges: 
{}", cfact, invalidTimeRanges);
-        iter.remove();
-      }
-    }
-    
cubeql.pruneCandidateFactSet(CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE);
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 2631f40..f7f8af2 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -987,6 +987,11 @@ public class CubeTestSetup {
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("userid", "int", 
"userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("xuserid", "int", 
"userid")));
     cubeDimensions2.add(new BaseDimAttribute(new FieldSchema("yuserid", "int", 
"userid")));
+    cubeDimensions2.add(new BaseDimAttribute(new 
FieldSchema("user_id_added_in_past", "int", "user_id_added_in_past")));
+    cubeDimensions2.add(new BaseDimAttribute(new 
FieldSchema("user_id_added_far_future", "int",
+        "user_id_added_far_future")));
+    cubeDimensions2.add(new BaseDimAttribute(new 
FieldSchema("user_id_deprecated", "int", "user_id_deprecated")));
+
     cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("xsports", 
"array<string>", ""),
       "xuser sports", "xusersports", "name", null, null, null));
     cubeDimensions2.add(new ReferencedDimAttribute(new FieldSchema("ysports", 
"array<string>", ""),
@@ -1004,7 +1009,6 @@ public class CubeTestSetup {
     refCols.add(new ChainRefCol("cubeCityStateCountry", "capital"));
     cubeDimensions2.add(new ReferencedDimAttribute(new 
FieldSchema("cubeCountryCapital", "String", "ref dim"),
       "Country capital", refCols, null, null, null, null));
-
     Map<String, String> cubeProperties = new HashMap<>();
     
cubeProperties.put(MetastoreUtil.getCubeTimedDimensionListKey(BASE_CUBE_NAME),
       "d_time,pt,it,et,test_time_dim,test_time_dim2");
@@ -1114,6 +1118,17 @@ public class CubeTestSetup {
         });
       }
     });
+    joinChains.add(new JoinChain("user_id_added_far_future_chain", 
"user_id_added_far_future_chain",
+        "user_id_added_far_future_chain") {
+      {
+        addPath(new ArrayList<TableReference>() {
+          {
+            add(new TableReference("basecube", "user_id_added_far_future"));
+            add(new TableReference("userdim", "user_id_added_far_future"));
+          }
+        });
+      }
+    });
     joinChains.add(new JoinChain("userSports", "user-sports", "user sports") {
       {
         addPath(new ArrayList<TableReference>() {
@@ -1433,10 +1448,32 @@ public class CubeTestSetup {
 
     storageTables = new HashMap<String, StorageTableDesc>();
     storageTables.put(c1, s1);
-
+    
properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_in_past"),
 "2016-01-01");
+    
properties.put(MetastoreConstants.FACT_COL_END_TIME_PFX.concat("user_id_deprecated"),
 "2016-01-01");
+    
properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_far_future"),
 "2099-01-01");
     client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 100L, properties,
       storageTables);
 
+    factName = "testFact4_RAW_BASE";
+    factColumns = new ArrayList<FieldSchema>();
+    factColumns.add(new FieldSchema("msr13", "double", "third measure"));
+    factColumns.add(new FieldSchema("msr14", "bigint", "fourth measure"));
+
+    // add dimensions of the cube
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema("processing_time", "timestamp", 
"processing time"));
+    factColumns.add(new FieldSchema("dim1", "string", "base dim"));
+    factColumns.add(new FieldSchema("user_id_added_in_past", "int", "user 
id"));
+    factColumns.add(new FieldSchema("user_id_added_far_future", "int", "user 
id"));
+    factColumns.add(new FieldSchema("user_id_deprecated", "int", "user id"));
+
+    storageTables = new HashMap<String, StorageTableDesc>();
+    storageTables.put(c1, s1);
+    
properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_in_past"),
 "2016-01-01");
+    
properties.put(MetastoreConstants.FACT_COL_END_TIME_PFX.concat("user_id_deprecated"),
 "2016-01-01");
+    
properties.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("user_id_added_far_future"),
 "2099-01-01");
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 100L, properties,
+        storageTables);
   }
 
   private void createCubeContinuousFact(CubeMetastoreClient client) throws 
Exception {
@@ -2703,6 +2740,8 @@ public class CubeTestSetup {
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("name", "string", 
"name")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("age", "string", 
"age")));
     dimAttrs.add(new BaseDimAttribute(new FieldSchema("gender", "string", 
"gender")));
+    dimAttrs.add(new BaseDimAttribute(new FieldSchema("gender", "string", 
"gender")));
+
     Map<String, String> dimProps = new HashMap<String, String>();
     dimProps.put(MetastoreUtil.getDimTimedDimensionKey(dimName), 
TestCubeMetastoreClient.getDatePartitionKey());
     Set<JoinChain> joinChains = new HashSet<JoinChain>();
@@ -2727,6 +2766,8 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("name", "string", "name"));
     dimColumns.add(new FieldSchema("age", "string", "age"));
     dimColumns.add(new FieldSchema("gender", "string", "gender"));
+    dimColumns.add(new FieldSchema("user_id_added_in_past", "int", 
"user_id_added_in_past"));
+    dimColumns.add(new FieldSchema("user_id_added_far_future", "int", 
"user_id_added_far_future"));
 
     Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, 
UpdatePeriod>();
     StorageTableDesc s1 = new StorageTableDesc();

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
index d42d494..84e18dc 100644
--- 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestBaseCubeQueries.java
@@ -113,7 +113,7 @@ public class TestBaseCubeQueries extends TestQueryRewrite {
      */
     boolean columnNotFound = false;
     List<String> testTimeDimFactTables = Arrays.asList("testfact3_base", 
"testfact1_raw_base", "testfact3_raw_base",
-      "testfact5_base", "testfact6_base");
+      "testfact5_base", "testfact6_base", "testfact4_raw_base");
     List<String> factTablesForMeasures = Arrays.asList("testfact_deprecated", 
"testfact2_raw_base", "testfact2_base");
     for (Map.Entry<String, List<CandidateTablePruneCause>> entry : 
pruneCauses.getDetails().entrySet()) {
       if 
(entry.getValue().contains(CandidateTablePruneCause.columnNotFound("test_time_dim")))
 {

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
index ed54f0c..b90d4d3 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestCubeRewriter.java
@@ -1535,6 +1535,26 @@ public class TestCubeRewriter extends TestQueryRewrite {
   }
 
   @Test
+  public void testFactColumnStartAndEndTime() throws Exception {
+    // Start time for dim attribute user_id_added_in_past is 2016-01-01
+    String query1 = "select user_id_added_in_past from basecube where " + 
TWO_DAYS_RANGE;
+    String hql1 = rewrite(query1, getConf());
+    assertTrue(hql1.contains("c1_testfact4_raw_base"));
+    // Start time for dim attribute user_id_added_far_future is 2099-01-01
+    String query2 = "select user_id_added_far_future from basecube where " + 
TWO_DAYS_RANGE;
+    LensException e1 = getLensExceptionInRewrite(query2, getConf());
+    assertTrue(e1.getMessage().contains("NO_FACT_HAS_COLUMN"));
+    // End time for dim attribute user_id_deprecated is 2016-01-01
+    String query3 = "select user_id_deprecated from basecube where " + 
TWO_DAYS_RANGE;
+    LensException e2 = getLensExceptionInRewrite(query3, getConf());
+    assertTrue(e2.getMessage().contains("NO_FACT_HAS_COLUMN"));
+    // Start time for ref column user_id_added_far_future_chain is 2099-01-01
+    String query4 = "select user_id_added_far_future_chain.name from basecube 
where " + TWO_DAYS_RANGE;
+    LensException e3 = getLensExceptionInRewrite(query4, getConf());
+    assertTrue(e3.getMessage().contains("NO_FACT_HAS_COLUMN"));
+  }
+
+  @Test
   public void testSelectDimonlyJoinOnCube() throws Exception {
     String query = "SELECT count (distinct cubecity.name) from testCube where 
" + TWO_DAYS_RANGE;
     Configuration conf = new Configuration(getConf());

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
index 857bc90..27a18f4 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestQueryMetrics.java
@@ -45,26 +45,27 @@ public class TestQueryMetrics extends TestQueryRewrite {
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
 
     Assert.assertTrue(reg.getGauges().keySet().containsAll(Arrays.asList(
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AggregateResolver-ITER-5",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AggregateResolver-ITER-6",
         
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.AliasReplacer-ITER-1",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-10",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-4",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-11",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.CandidateTableResolver-ITER-5",
         
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ColumnResolver-ITER-0",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-15",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-16",
         
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.DenormalizationResolver-ITER-3",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-16",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-17",
         
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.ExpressionResolver-ITER-2",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.FieldValidator-ITER-7",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.GroupbyResolver-ITER-6",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-8",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-18",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-19",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-17",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.MaxCoveringFactResolver-ITER-13",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-11",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.FieldValidator-ITER-8",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.GroupbyResolver-ITER-7",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.JoinResolver-ITER-9",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LeastPartitionResolver-ITER-19",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestDimensionResolver-ITER-20",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.LightestFactResolver-ITER-18",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.MaxCoveringFactResolver-ITER-14",
         
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-12",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-14",
-        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimerangeResolver-ITER-9")
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-13",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.StorageTableResolver-ITER-15",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimeRangeChecker-ITER-10",
+        
"lens.MethodMetricGauge.testCubeRewriteStackName-org.apache.lens.cube.parse.TimerangeResolver-ITER-4")
     ), reg.getGauges().keySet().toString());
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-examples/src/main/resources/fact1.xml
----------------------------------------------------------------------
diff --git a/lens-examples/src/main/resources/fact1.xml 
b/lens-examples/src/main/resources/fact1.xml
index effdfac..3f438a6 100644
--- a/lens-examples/src/main/resources/fact1.xml
+++ b/lens-examples/src/main/resources/fact1.xml
@@ -26,6 +26,8 @@
     <column comment="" name="measure2" _type="BIGINT"/>
     <column comment="" name="measure3" _type="INT"/>
     <column comment="" name="measure4" _type="FLOAT"/>
+    <column comment="" name="measure5" _type="FLOAT" start_time="2015-01-01"/>
+    <column comment="" name="measure6" _type="FLOAT" end_time="2015-01-01"/>
   </columns>
   <properties>
     <property name="cube.fact.is.aggregated" value="true"/>

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-examples/src/test/resources/yaml/fact1.yaml
----------------------------------------------------------------------
diff --git a/lens-examples/src/test/resources/yaml/fact1.yaml 
b/lens-examples/src/test/resources/yaml/fact1.yaml
index 4252e33..c5c6d57 100644
--- a/lens-examples/src/test/resources/yaml/fact1.yaml
+++ b/lens-examples/src/test/resources/yaml/fact1.yaml
@@ -18,6 +18,12 @@ columns:
   measure2: type: BIGINT
   measure3: type: INT
   measure4: type: FLOAT
+  measure5:
+    type: FLOAT
+    startTime: 2015-01-01
+  measure6:
+    type: FLOAT
+    endTime: 2015-01-01
 properties:
   cube.fact.is.aggregated: true
 storageTables:

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
 
b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index c1f9a02..31b8ce4 100644
--- 
a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ 
b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -430,11 +430,17 @@ public class CubeMetastoreServiceImpl extends 
BaseLensService implements CubeMet
         JAXBUtils.fieldSchemaListFromColumns(fact.getColumns()),
         
JAXBUtils.getFactUpdatePeriodsFromStorageTables(fact.getStorageTables()),
         fact.getWeight(),
-        JAXBUtils.mapFromXProperties(fact.getProperties()),
+        addFactColStartTimePropertyToFactProperties(fact),
         JAXBUtils.storageTableMapFromXStorageTables(fact.getStorageTables()));
       log.info("Created fact table " + fact.getName());
     }
   }
+  public Map<String, String> addFactColStartTimePropertyToFactProperties(XFactTable fact) {
+    Map<String, String> props = new HashMap<String, String>();
+    props.putAll(JAXBUtils.mapFromXProperties(fact.getProperties()));
+    
props.putAll(JAXBUtils.columnStartAndEndTimeFromXColumns(fact.getColumns()));
+    return props;
+  }
 
   @Override
   public void createSegmentation(LensSessionHandle sessionid, XSegmentation 
cubeSeg) throws LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java 
b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
index 1b6b819..51fcb43 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/JAXBUtils.java
@@ -560,6 +560,22 @@ public final class JAXBUtils {
     return null;
   }
 
+  public static Map<String, String> columnStartAndEndTimeFromXColumns(XColumns 
columns) {
+    if (columns != null && !columns.getColumn().isEmpty()) {
+      Map<String, String> colStartTimeMap = new HashMap<String, String>();
+      for (XColumn c : columns.getColumn()) {
+        if (!(c.getStartTime() == null)) {
+          
colStartTimeMap.put(MetastoreConstants.FACT_COL_START_TIME_PFX.concat(c.getName()),
 c.getStartTime());
+        }
+        if (!(c.getEndTime() == null)) {
+          
colStartTimeMap.put(MetastoreConstants.FACT_COL_END_TIME_PFX.concat(c.getName()),
 c.getEndTime());
+        }
+      }
+      return colStartTimeMap;
+    }
+    return null;
+  }
+
   public static List<XColumn> columnsFromFieldSchemaList(List<FieldSchema> 
fslist) {
     List<XColumn> cols = new ArrayList<XColumn>();
     if (fslist == null || fslist.isEmpty()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/934f8457/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git 
a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
 
b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 61bc133..be1326f 100644
--- 
a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ 
b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -1812,6 +1812,13 @@ public class TestMetastoreService extends LensJerseyTest 
{
     c2.setComment("col2");
     f.getColumns().getColumn().add(c2);
 
+    XColumn c3 = cubeObjectFactory.createXColumn();
+    c3.setName("c3");
+    c3.setType("STRING");
+    c3.setComment("col3");
+    c3.setStartTime("2016-01-01");
+    c3.setEndTime("2017-01-01");
+    f.getColumns().getColumn().add(c3);
 
     Map<String, String> properties = LensUtil.getHashMap("foo", "bar");
     
f.getProperties().getProperty().addAll(JAXBUtils.xPropertiesFromMap(properties));
@@ -1864,6 +1871,11 @@ public class TestMetastoreService extends LensJerseyTest 
{
         }
       }
 
+      //Check for column with start time
+      Map<String, String> props = 
JAXBUtils.mapFromXProperties(gotFact.getProperties());
+      
assertEquals(props.get(MetastoreConstants.FACT_COL_START_TIME_PFX.concat("c3")),
 "2016-01-01");
+      
assertEquals(props.get(MetastoreConstants.FACT_COL_END_TIME_PFX.concat("c3")), 
"2017-01-01");
+
       assertTrue(foundC1);
       assertEquals(cf.getProperties().get("foo"), "bar");
       assertTrue(cf.getStorages().contains("S1"));

Reply via email to