Repository: lens
Updated Branches:
  refs/heads/lens-1381 [created] b6f0cc3d4


http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
index 89b50f5..fe867c7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/TimeRangeChecker.java
@@ -42,6 +42,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class TimeRangeChecker implements ContextRewriter {
   public TimeRangeChecker(Configuration conf) {
+
   }
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
@@ -49,7 +50,6 @@ public class TimeRangeChecker implements ContextRewriter {
       return;
     }
     doColLifeValidation(cubeql);
-    doFactRangeValidation(cubeql);
   }
   private void extractTimeRange(CubeQueryContext cubeql) throws LensException {
     // get time range -
@@ -137,6 +137,7 @@ public class TimeRangeChecker implements ContextRewriter {
     cubeql.getTimeRanges().add(range);
   }
 
+  //TODO union: This can be executed before finding covering sets, but after DenormalizationResolver and JoinResolver
   private void doColLifeValidation(CubeQueryContext cubeql) throws 
LensException,
       ColUnAvailableInTimeRangeException {
     Set<String> cubeColumns = 
cubeql.getColumnsQueriedForTable(cubeql.getCube().getName());
@@ -222,7 +223,6 @@ public class TimeRangeChecker implements ContextRewriter {
     } // End column loop
   }
 
-
   private void throwException(CubeColumn column) throws 
ColUnAvailableInTimeRangeException {
 
     final Long availabilityStartTime = 
(column.getStartTimeMillisSinceEpoch().isPresent())
@@ -236,23 +236,4 @@ public class TimeRangeChecker implements ContextRewriter {
 
     throw new ColUnAvailableInTimeRangeException(col);
   }
-
-  private void doFactRangeValidation(CubeQueryContext cubeql) {
-    Iterator<CandidateFact> iter = cubeql.getCandidateFacts().iterator();
-    while (iter.hasNext()) {
-      CandidateFact cfact = iter.next();
-      List<TimeRange> invalidTimeRanges = Lists.newArrayList();
-      for (TimeRange timeRange : cubeql.getTimeRanges()) {
-        if (!cfact.isValidForTimeRange(timeRange)) {
-          invalidTimeRanges.add(timeRange);
-        }
-      }
-      if (!invalidTimeRanges.isEmpty()){
-        cubeql.addFactPruningMsgs(cfact.fact, 
CandidateTablePruneCause.factNotAvailableInRange(invalidTimeRanges));
-        log.info("Not considering {} as it's not available for time ranges: 
{}", cfact, invalidTimeRanges);
-        iter.remove();
-      }
-    }
-    
cubeql.pruneCandidateFactSet(CandidateTablePruneCause.CandidateTablePruneCode.FACT_NOT_AVAILABLE_IN_RANGE);
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
new file mode 100644
index 0000000..ce28b7e
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionCandidate.java
@@ -0,0 +1,247 @@
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.server.api.error.LensException;
+
+import lombok.Getter;
+
+/**
+ * Represents a union of two candidates
+ */
+public class UnionCandidate implements Candidate {
+
+  /**
+   * Caching start and end time calculated for this candidate as it may have 
many child candidates.
+   */
+  Date startTime = null;
+  Date endTime = null;
+  String toStr;
+  @Getter
+  String alias;
+  /**
+   * List of child candidates that will be union-ed
+   */
+  private List<Candidate> childCandidates;
+
+  public UnionCandidate(List<Candidate> childCandidates, String alias) {
+    this.childCandidates = childCandidates;
+    this.alias = alias;
+  }
+
+  @Override
+  public String toHQL() {
+    return null;
+  }
+
+  @Override
+  public QueryAST getQueryAst() {
+    return null;
+  }
+
+  @Override
+  public Collection<String> getColumns() {
+    return null;
+  }
+
+  @Override
+  public Date getStartTime() {
+    //Note: concurrent calls not handled specifically (This should not be a 
problem even if we do
+    //get concurrent calls).
+
+    if (startTime == null) {
+      Date minStartTime = childCandidates.get(0).getStartTime();
+      for (Candidate child : childCandidates) {
+        if (child.getStartTime().before(minStartTime)) {
+          minStartTime = child.getStartTime();
+        }
+      }
+      startTime = minStartTime;
+    }
+    return startTime;
+  }
+
+  @Override
+  public Date getEndTime() {
+    if (endTime == null) {
+      Date maxEndTime = childCandidates.get(0).getEndTime();
+      for (Candidate child : childCandidates) {
+        if (child.getEndTime().after(maxEndTime)) {
+          maxEndTime = child.getEndTime();
+        }
+      }
+      endTime = maxEndTime;
+    }
+    return endTime;
+  }
+
+  @Override
+  public double getCost() {
+    double cost = 0.0;
+    for (Candidate cand : childCandidates) {
+      cost += cand.getCost();
+    }
+    return cost;
+  }
+
+  @Override
+  public boolean contains(Candidate candidate) {
+    if (this.equals(candidate)) {
+      return true;
+    }
+
+    for (Candidate child : childCandidates) {
+      if (child.contains((candidate)))
+        return true;
+    }
+    return false;
+  }
+
+  @Override
+  public Collection<Candidate> getChildren() {
+    return childCandidates;
+  }
+
+  /**
+   * @param timeRange
+   * @return
+   */
+  @Override
+  public boolean evaluateCompleteness(TimeRange timeRange, boolean 
failOnPartialData) throws LensException {
+    Map<Candidate, TimeRange> candidateRange = 
getTimeRangeForChildren(timeRange);
+    boolean ret = true;
+    for (Map.Entry<Candidate, TimeRange> entry : candidateRange.entrySet()) {
+      ret &= entry.getKey().evaluateCompleteness(entry.getValue(), 
failOnPartialData);
+    }
+    return ret;
+  }
+
+  @Override
+  public Set<FactPartition> getParticipatingPartitions() {
+    return null;
+  }
+
+  @Override
+  public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext 
expr) {
+    for (Candidate cand : childCandidates) {
+      if (!cand.isExpressionEvaluable(expr)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Override
+  public String toString() {
+    if (this.toStr == null) {
+      this.toStr = getToString();
+    }
+    return this.toStr;
+  }
+
+  private String getToString() {
+    StringBuilder builder = new StringBuilder(10 * childCandidates.size());
+    builder.append("UNION[");
+    for (Candidate candidate : childCandidates) {
+      builder.append(candidate.toString());
+      builder.append(", ");
+    }
+    builder.delete(builder.length() - 2, builder.length());
+    builder.append("]");
+    return builder.toString();
+  }
+
+  private Map<Candidate, TimeRange> getTimeRangeForChildren(TimeRange 
timeRange) {
+    Collections.sort(childCandidates, new Comparator<Candidate>() {
+      @Override
+      public int compare(Candidate o1, Candidate o2) {
+        return o1.getCost() < o2.getCost() ? -1 : o1.getCost() == o2.getCost() 
? 0 : 1;
+      }
+    });
+
+    Map<Candidate, TimeRange> candidateTimeRangeMap = new HashMap<>();
+    // Sorted list based on the weights.
+    Set<TimeRange> ranges = new HashSet<>();
+
+    ranges.add(timeRange);
+    for (Candidate c : childCandidates) {
+      TimeRange.TimeRangeBuilder builder = getClonedBuiler(timeRange);
+      TimeRange tr = resolveTimeRange(c, ranges, builder);
+      if (tr != null) {
+        // If the time range is not null it means this child candidate is 
valid for this union candidate.
+        candidateTimeRangeMap.put(c, tr);
+      }
+    }
+    return candidateTimeRangeMap;
+  }
+
+  private TimeRange resolveTimeRange(Candidate c, Set<TimeRange> ranges, 
TimeRange.TimeRangeBuilder builder) {
+    Iterator<TimeRange> it = ranges.iterator();
+    Set<TimeRange> newTimeRanges = new HashSet<>();
+    TimeRange ret = null;
+    while (it.hasNext()) {
+      TimeRange range = it.next();
+      // Check for out of range
+      if (c.getStartTime().getTime() >= range.getToDate().getTime() || 
c.getEndTime().getTime() <= range.getFromDate()
+        .getTime()) {
+        continue;
+      }
+      // This means overlap.
+      if (c.getStartTime().getTime() <= range.getFromDate().getTime()) {
+        // Start time of the new time range will be range.getFromDate()
+        builder.fromDate(range.getFromDate());
+        if (c.getEndTime().getTime() <= range.getToDate().getTime()) {
+          // End time is in the middle of the range is equal to c.getEndTime().
+          builder.toDate(c.getEndTime());
+        } else {
+          // End time will be range.getToDate()
+          builder.toDate(range.getToDate());
+        }
+      } else {
+        builder.fromDate(c.getStartTime());
+        if (c.getEndTime().getTime() <= range.getToDate().getTime()) {
+          builder.toDate(c.getEndTime());
+        } else {
+          builder.toDate(range.getToDate());
+        }
+      }
+      // Remove the time range and add more time ranges.
+      it.remove();
+      ret = builder.build();
+      if (ret.getFromDate().getTime() == range.getFromDate().getTime()) {
+        if (ret.getToDate().getTime() < range.getToDate().getTime()) {
+          // The end time is the start time of the new range.
+          TimeRange.TimeRangeBuilder b1 = getClonedBuiler(ret);
+          b1.fromDate(ret.getFromDate());
+          b1.toDate(range.getToDate());
+          newTimeRanges.add(b1.build());
+        }
+      } else {
+        TimeRange.TimeRangeBuilder b1 = getClonedBuiler(ret);
+        b1.fromDate(range.getFromDate());
+        b1.toDate(ret.getFromDate());
+        newTimeRanges.add(b1.build());
+        if (ret.getToDate().getTime() < range.getToDate().getTime()) {
+          TimeRange.TimeRangeBuilder b2 = getClonedBuiler(ret);
+          b2.fromDate(ret.getToDate());
+          b2.toDate(range.getToDate());
+          newTimeRanges.add(b2.build());
+        }
+      }
+      break;
+    }
+    ranges.addAll(newTimeRanges);
+    return ret;
+  }
+
+  private TimeRange.TimeRangeBuilder getClonedBuiler(TimeRange timeRange) {
+    TimeRange.TimeRangeBuilder builder = new TimeRange.TimeRangeBuilder();
+    builder.astNode(timeRange.getAstNode());
+    builder.childIndex(timeRange.getChildIndex());
+    builder.parent(timeRange.getParent());
+    builder.partitionColumn(timeRange.getPartitionColumn());
+    return builder;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
new file mode 100644
index 0000000..cae66d5
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/UnionQueryWriter.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.lens.cube.parse;
+
+/**
+ * This  is a helper that is used for creating QueryAst for UnionCandidate
+ */
+public class UnionQueryWriter {
+
+  private UnionCandidate candidate;
+
+  private  SimpleHQLContext simpleHQLContext;
+
+  private  QueryAST ast;
+
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
index 3d5c5ac..ab7a0f9 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/join/AutoJoinContext.java
@@ -169,6 +169,7 @@ public class AutoJoinContext {
     joinPathFromColumns.remove(dim);
   }
 
+  //TODO union: use StorageCandidate
   public String getFromString(String fromTable, CandidateFact fact, 
Set<Dimension> qdims,
     Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext cubeql, 
QueryAST ast) throws LensException {
     String fromString = fromTable;
@@ -347,6 +348,16 @@ public class AutoJoinContext {
     return allPaths;
   }
 
+  //TODO union: use Set<StorageCandidate>
+  /**
+   * Prunes the join chains defined in Cube whose starting column is not there 
in any of the candidate facts.
+   * Same is done in case of join paths defined in Dimensions.
+   *
+   * @param cube
+   * @param cfacts
+   * @param dimsToQuery
+   * @throws LensException
+   */
   public void pruneAllPaths(CubeInterface cube, final Set<CandidateFact> 
cfacts,
     final Map<Dimension, CandidateDim> dimsToQuery) throws LensException {
     // Remove join paths which cannot be satisfied by the resolved candidate
@@ -355,6 +366,7 @@ public class AutoJoinContext {
       // include columns from all picked facts
       Set<String> factColumns = new HashSet<>();
       for (CandidateFact cFact : cfacts) {
+        //Use StorageCandidate.getColumns()
         factColumns.addAll(cFact.getColumns());
       }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java 
b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
index fd6c30d..a5ae425 100644
--- a/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
+++ b/lens-cube/src/main/java/org/apache/lens/driver/cube/RewriterPlan.java
@@ -23,8 +23,7 @@ import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
-import org.apache.lens.cube.parse.CandidateTable;
-import org.apache.lens.cube.parse.CubeQueryContext;
+import org.apache.lens.cube.parse.*;
 import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.driver.DriverQueryPlan;
 import org.apache.lens.server.api.error.LensException;
@@ -49,23 +48,24 @@ public final class RewriterPlan extends DriverQueryPlan {
 
     for (CubeQueryContext ctx : cubeQueries) {
       if (ctx.getPickedDimTables() != null && 
!ctx.getPickedDimTables().isEmpty()) {
-        for (CandidateTable dim : ctx.getPickedDimTables()) {
-          addTablesQueried(dim.getStorageTables());
+        for (CandidateDim dim : ctx.getPickedDimTables()) {
+          addTablesQueried(dim.getStorageName());
           if (partitions.get(dim.getName()) == null || 
partitions.get(dim.getName()).isEmpty()) {
             // puts storage table to latest part
-            partitions.put(dim.getName(), dim.getPartsQueried());
+            partitions.put(dim.getName(), dim.getParticipatingPartitions());
           }
         }
       }
-      if (ctx.getPickedFacts() != null && !ctx.getPickedFacts().isEmpty()) {
-        for (CandidateTable fact : ctx.getPickedFacts()) {
-          addTablesQueried(fact.getStorageTables());
-          Set<FactPartition> factParts = (Set<FactPartition>) 
partitions.get(fact.getName());
+      //TODO union: updated code to work on picked Candidate
+      if (ctx.getPickedCandidate() != null) {
+        for (StorageCandidate sc : 
CandidateUtil.getStorageCandidates(ctx.getPickedCandidate())) {
+          addTablesQueried(sc.getStorageName());
+          Set<FactPartition> factParts = (Set<FactPartition>) 
partitions.get(sc.getName());
           if (factParts == null) {
             factParts = new HashSet<FactPartition>();
-            partitions.put(fact.getName(), factParts);
+            partitions.put(sc.getName(), factParts);
           }
-          factParts.addAll((Set<FactPartition>) fact.getPartsQueried());
+          factParts.addAll((Set<FactPartition>) 
sc.getParticipatingPartitions());
         }
       }
       for (String table : getTablesQueried()) {

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 41ea83d..90be92d 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -537,8 +537,17 @@ public class CubeTestSetup {
       "New measure", null, null, null, NOW, null, 100.0));
     cubeMeasures.add(new ColumnMeasure(new FieldSchema("msr15", "int", 
"fifteenth measure"), "Measure15", null, "SUM",
       "RS"));
+    String prefix = "union_join_ctx_";
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema(prefix + "msr1", "int", 
prefix + "first measure")));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema(prefix + "msr2", "int", 
prefix + "second measure")));
+    cubeMeasures.add(new ColumnMeasure(new FieldSchema(prefix + "msr3", "int", 
prefix + "third measure")));
 
     cubeDimensions = new HashSet<CubeDimAttribute>();
+
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema(prefix + "d_time", 
"timestamp", "d time")));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema(prefix + "cityid", 
"timestamp", "the cityid ")));
+    cubeDimensions.add(new BaseDimAttribute(new FieldSchema(prefix + 
"zipcode", "timestamp", "the zipcode")));
+
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("d_time", 
"timestamp", "d time")));
     cubeDimensions.add(new BaseDimAttribute(new FieldSchema("processing_time", 
"timestamp", "processing time")));
     List<CubeDimAttribute> locationHierarchy = new 
ArrayList<CubeDimAttribute>();
@@ -1268,6 +1277,113 @@ public class CubeTestSetup {
 
     // create base cube facts
     createBaseCubeFacts(client);
+    // create join and union ctx facts
+    createUnionAndJoinContextFacts(client);
+  }
+
+  private void createUnionAndJoinContextFacts(CubeMetastoreClient client)   
throws HiveException, LensException {
+    String prefix = "union_join_ctx_";
+    String derivedCubeName = prefix + "der1";
+    Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new 
HashMap<String, Set<UpdatePeriod>>();
+    Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
+    updates.add(DAILY);
+
+    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    List<String> timePartCols = new ArrayList<String>();
+    partCols.add(TestCubeMetastoreClient.getDatePartition());
+    timePartCols.add(TestCubeMetastoreClient.getDatePartitionKey());
+
+    StorageTableDesc s1 = new StorageTableDesc();
+    s1.setInputFormat(TextInputFormat.class.getCanonicalName());
+    s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
+    s1.setPartCols(partCols);
+    s1.setTimePartCols(timePartCols);
+
+    storageAggregatePeriods.put(c1, updates);
+
+    Map<String, StorageTableDesc> storageTables = new HashMap<String, 
StorageTableDesc>();
+    storageTables.put(c1, s1);
+
+    // create fact1 (all dim attributes only msr1)
+    String factName = prefix + "fact1";
+    List<FieldSchema> factColumns = new ArrayList<FieldSchema>();
+    factColumns.add(new ColumnMeasure(new FieldSchema(prefix + "msr1", "int", 
"first measure")).getColumn());
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema(prefix + "zipcode", "int", "zip"));
+    factColumns.add(new FieldSchema(prefix + "cityid", "int", "city id"));
+    // add fact start and end time property
+    Map<String, String> properties = Maps.newHashMap(factValidityProperties);
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day - 90 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day - 30 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    // create fact2 with same schema, but it starts after fact1 ends
+    factName = prefix + "fact2";
+    properties.clear();
+    //factColumns.add(new ColumnMeasure(new FieldSchema(prefix + "msr2", 
"int", "second measure")).getColumn());
+    // add fact start and end time property
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day - 31 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day + 7 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    // create fact3 (all dim attributes only msr2)
+    factName = prefix + "fact3";
+    factColumns.clear();
+    factColumns.add(new ColumnMeasure(new FieldSchema(prefix + "msr2", "int", 
"second measure")).getColumn());
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema(prefix + "zipcode", "int", "zip"));
+    factColumns.add(new FieldSchema(prefix + "cityid", "int", "city id"));
+    properties.clear();
+    // add fact start and end time property
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day - 90 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day + 7 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    // create fact4 will all all measures and entire timerange covered
+    factName = prefix + "fact4";
+    factColumns.add(new ColumnMeasure(new FieldSchema(prefix + "msr1", "int", 
"first measure")).getColumn());
+    properties.clear();
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day - 90 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day + 7 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    // create fact5 and fact6 with msr3 and covering timerange as set
+    factName = prefix + "fact5";
+    factColumns.clear();
+    factColumns.add(new FieldSchema("d_time", "timestamp", "event time"));
+    factColumns.add(new FieldSchema(prefix + "zipcode", "int", "zip"));
+    factColumns.add(new FieldSchema(prefix + "cityid", "int", "city id"));
+    factColumns.add(new ColumnMeasure(new FieldSchema(prefix + "msr3", "int", 
"third measure")).getColumn());
+    properties.clear();
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day - 90 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day -30 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    factName = prefix + "fact6";
+    properties.clear();
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_START_TIME, 
DateUtil.relativeToAbsolute("now.day -31 days"));
+    properties.put(MetastoreConstants.FACT_ABSOLUTE_END_TIME, 
DateUtil.relativeToAbsolute("now.day + 7 days"));
+    client.createCubeFactTable(BASE_CUBE_NAME, factName, factColumns, 
storageAggregatePeriods, 5L,
+        properties, storageTables);
+
+    // Create derived cube
+    Map<String, String> derivedProperties = new HashMap<>();
+    derivedProperties.put(MetastoreConstants.CUBE_ALL_FIELDS_QUERIABLE, 
"true");
+    Set<String> measures = new HashSet<>();
+    measures.add(prefix + "msr1");
+    measures.add(prefix + "msr2");
+    measures.add(prefix + "msr3");
+    Set<String> dimensions = new HashSet<>();
+    dimensions.add(prefix + "cityid");
+    dimensions.add(prefix + "zipcode");
+    dimensions.add("d_time");
+    client.createDerivedCube(BASE_CUBE_NAME, derivedCubeName, measures, 
dimensions, derivedProperties, 5L);
+
   }
 
   private void createBaseCubeFacts(CubeMetastoreClient client) throws 
HiveException, LensException {

http://git-wip-us.apache.org/repos/asf/lens/blob/b6f0cc3d/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
new file mode 100644
index 0000000..061224e
--- /dev/null
+++ 
b/lens-cube/src/test/java/org/apache/lens/cube/parse/TestUnionAndJoinCandidates.java
@@ -0,0 +1,65 @@
+package org.apache.lens.cube.parse;
+
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.lens.server.api.LensServerAPITestUtil;
+import org.apache.lens.server.api.error.LensException;
+import org.testng.annotations.BeforeTest;
+import org.testng.annotations.Test;
+
+import static org.apache.lens.cube.metadata.DateFactory.*;
+import static org.apache.lens.cube.parse.CubeQueryConfUtil.*;
+import static org.apache.lens.cube.parse.CubeTestSetup.*;
+
+public class TestUnionAndJoinCandidates extends TestQueryRewrite {
+
+  private Configuration testConf;
+
+  @BeforeTest
+  public void setupDriver() throws Exception {
+    testConf = LensServerAPITestUtil.getConfiguration(
+        DISABLE_AUTO_JOINS, false,
+        ENABLE_SELECT_TO_GROUPBY, true,
+        ENABLE_GROUP_BY_TO_SELECT, true,
+        DISABLE_AGGREGATE_RESOLVER, false,
+        ENABLE_STORAGES_UNION, true);
+  }
+
+  @Override
+  public Configuration getConf() {
+    return new Configuration(testConf);
+  }
+
+  @Test
+  public void testRangeCoveringCandidates() throws ParseException, 
LensException {
+    try {
+      String prefix = "union_join_ctx_";
+      String cubeName = prefix + "der1";
+      Configuration conf = 
LensServerAPITestUtil.getConfigurationWithParams(getConf(),
+          //Supported storage
+          CubeQueryConfUtil.DRIVER_SUPPORTED_STORAGES, "C1",
+          // Storage tables
+          getValidStorageTablesKey(prefix + "fact1"), "C1_" + prefix + "fact1",
+          getValidStorageTablesKey(prefix + "fact2"), "C1_" + prefix + "fact2",
+          getValidStorageTablesKey(prefix + "fact3"), "C1_" + prefix + "fact3",
+          // Update periods
+          getValidUpdatePeriodsKey(prefix + "fact1", "C1"), "DAILY",
+          getValidUpdatePeriodsKey(prefix + "fact2", "C1"), "DAILY",
+          getValidUpdatePeriodsKey(prefix + "fact3", "C1"), "DAILY");
+
+      String colsSelected = prefix + "cityid , " + prefix + "zipcode , " + 
"sum(" + prefix + "msr1) , "
+          + "sum(" + prefix + "msr2), " + "sum(" + prefix + "msr3) ";
+
+      String whereCond = prefix + "zipcode = 'a' and " + prefix + "cityid = 
'b' and " +
+          "(" + TWO_MONTHS_RANGE_UPTO_DAYS + ")";
+      String hqlQuery = rewrite("select " + colsSelected + " from " + cubeName 
+ " where " + whereCond, conf);
+
+      System.out.println(hqlQuery);
+
+    } finally {
+      getStorageToUpdatePeriodMap().clear();
+    }
+  }
+
+}

Reply via email to