Repository: calcite
Updated Branches:
  refs/heads/master 159bcab20 -> ac5cce707


[CALCITE-2089] Druid adapter: Push filter on floor(time) to Druid (Nishant 
Bangarwa)

Close apache/calcite#582


Project: http://git-wip-us.apache.org/repos/asf/calcite/repo
Commit: http://git-wip-us.apache.org/repos/asf/calcite/commit/ac5cce70
Tree: http://git-wip-us.apache.org/repos/asf/calcite/tree/ac5cce70
Diff: http://git-wip-us.apache.org/repos/asf/calcite/diff/ac5cce70

Branch: refs/heads/master
Commit: ac5cce707a743299107ba3fd1cfc9a24e745ef42
Parents: 159bcab
Author: Nishant Bangarwa <[email protected]>
Authored: Fri Dec 22 12:53:31 2017 -0800
Committer: Jesus Camacho Rodriguez <[email protected]>
Committed: Fri Dec 22 12:53:34 2017 -0800

----------------------------------------------------------------------
 .../org/apache/calcite/test/CalciteAssert.java  |  10 +-
 .../adapter/druid/DruidDateTimeUtils.java       |   2 +-
 .../calcite/adapter/druid/DruidQuery.java       |  95 +++++++++++----
 .../adapter/druid/TimeExtractionFunction.java   |  21 +++-
 .../org/apache/calcite/test/DruidAdapterIT.java | 117 ++++++++++++++++++-
 5 files changed, 209 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/calcite/blob/ac5cce70/core/src/test/java/org/apache/calcite/test/CalciteAssert.java
----------------------------------------------------------------------
diff --git a/core/src/test/java/org/apache/calcite/test/CalciteAssert.java 
b/core/src/test/java/org/apache/calcite/test/CalciteAssert.java
index 70a415b..49a9c31 100644
--- a/core/src/test/java/org/apache/calcite/test/CalciteAssert.java
+++ b/core/src/test/java/org/apache/calcite/test/CalciteAssert.java
@@ -548,9 +548,13 @@ public class CalciteAssert {
         calciteConnection.getProperties().setProperty(
             CalciteConnectionProperty.CREATE_MATERIALIZATIONS.camelName(),
             Boolean.toString(materializationsEnabled));
-        calciteConnection.getProperties().setProperty(
-            CalciteConnectionProperty.TIME_ZONE.camelName(),
-            DateTimeUtils.UTC_ZONE.getID());
+        if (!calciteConnection.getProperties()
+            .containsKey(CalciteConnectionProperty.TIME_ZONE.camelName())) {
+          // Do not override if some test has already set this property.
+          calciteConnection.getProperties().setProperty(
+              CalciteConnectionProperty.TIME_ZONE.camelName(),
+              DateTimeUtils.UTC_ZONE.getID());
+        }
       }
       for (Pair<Hook, Function> hook : hooks) {
         closer.add(hook.left.addThread(hook.right));

http://git-wip-us.apache.org/repos/asf/calcite/blob/ac5cce70/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
----------------------------------------------------------------------
diff --git 
a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java 
b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
index 562eb4a..566e781 100644
--- 
a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
+++ 
b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidDateTimeUtils.java
@@ -249,7 +249,7 @@ public class DruidDateTimeUtils {
     }
   }
 
-  private static TimestampString literalValue(RexNode node, TimeZone timeZone) 
{
+  protected static TimestampString literalValue(RexNode node, TimeZone 
timeZone) {
     switch (node.getKind()) {
     case LITERAL:
       switch (((RexLiteral) node).getTypeName()) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/ac5cce70/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
----------------------------------------------------------------------
diff --git 
a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java 
b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
index db7d4ff..2469841 100644
--- a/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
+++ b/druid/src/main/java/org/apache/calcite/adapter/druid/DruidQuery.java
@@ -63,6 +63,7 @@ import org.apache.calcite.sql.validate.SqlValidatorUtil;
 import org.apache.calcite.util.ImmutableBitSet;
 import org.apache.calcite.util.Litmus;
 import org.apache.calcite.util.Pair;
+import org.apache.calcite.util.TimestampString;
 import org.apache.calcite.util.Util;
 
 import com.fasterxml.jackson.core.JsonFactory;
@@ -80,12 +81,14 @@ import org.joda.time.Interval;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.math.BigDecimal;
+import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
+import java.util.TimeZone;
 import java.util.regex.Pattern;
 
 import static org.apache.calcite.sql.SqlKind.INPUT_REF;
@@ -234,6 +237,8 @@ public class DruidQuery extends AbstractRelNode implements 
BindableRel {
       return isValidCast((RexCall) e, boundedComparator);
     case EXTRACT:
       return TimeExtractionFunction.isValidTimeExtract((RexCall) e);
+    case FLOOR:
+      return TimeExtractionFunction.isValidTimeFloor((RexCall) e);
     case IS_TRUE:
       return isValidFilter(((RexCall) e).getOperands().get(0), 
boundedComparator);
     default:
@@ -264,7 +269,9 @@ public class DruidQuery extends AbstractRelNode implements 
BindableRel {
       return true;
     }
     if (e.getOperands().get(0).isA(SqlKind.LITERAL)
-        && e.getType().getFamily() == SqlTypeFamily.TIMESTAMP) {
+        && (e.getType().getSqlTypeName() == SqlTypeName.DATE
+        || e.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP
+        || e.getType().getSqlTypeName() == 
SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) {
       // CAST of literal to timestamp type
       return true;
     }
@@ -523,7 +530,7 @@ public class DruidQuery extends AbstractRelNode implements 
BindableRel {
       translator.clearFieldNameLists();
       final ImmutableList.Builder<String> builder = ImmutableList.builder();
       for (RexNode project : projects) {
-        builder.add(translator.translate(project, true));
+        builder.add(translator.translate(project, true, false));
       }
       fieldNames = builder.build();
     }
@@ -1110,6 +1117,7 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
     final DruidTable druidTable;
     final RelDataType rowType;
     final String timeZone;
+    final SimpleDateFormat dateFormatter;
 
     Translator(DruidTable druidTable, RelDataType rowType, String timeZone) {
       this.druidTable = druidTable;
@@ -1124,6 +1132,11 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
         }
       }
       this.timeZone = timeZone;
+      this.dateFormatter = new 
SimpleDateFormat(TimeExtractionFunction.ISO_TIME_FORMAT,
+          Locale.ROOT);
+      if (timeZone != null) {
+        this.dateFormatter.setTimeZone(TimeZone.getTimeZone(timeZone));
+      }
     }
 
     protected void clearFieldNameLists() {
@@ -1131,7 +1144,13 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
       metrics.clear();
     }
 
-    @SuppressWarnings("incomplete-switch") String translate(RexNode e, boolean 
set) {
+    @SuppressWarnings("incomplete-switch")
+    /**
+     * formatDateString is used to format timestamp values to druid format 
using
+     * {@link DruidQuery.Translator#dateFormatter}. This is needed when 
pushing timestamp
+     * comparisons to druid using TimeFormatExtractionFunction that returns a 
string value.
+     */
+    String translate(RexNode e, boolean set, boolean formatDateString) {
       int index = -1;
       switch (e.getKind()) {
       case INPUT_REF:
@@ -1139,9 +1158,23 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
         index = ref.getIndex();
         break;
       case CAST:
-        return tr(e, 0, set);
+        return tr(e, 0, set, formatDateString);
       case LITERAL:
-        return ((RexLiteral) e).getValue3().toString();
+        final RexLiteral rexLiteral = (RexLiteral) e;
+        if (!formatDateString) {
+          return Objects.toString(rexLiteral.getValue3());
+        } else {
+          // Case when we are passing to druid as an extractionFunction
+          // Need to format the timestamp String in druid format.
+          TimestampString timestampString = DruidDateTimeUtils
+              .literalValue(e, TimeZone.getTimeZone(timeZone));
+          if (timestampString == null) {
+            throw new AssertionError(
+                "Cannot translate Literal" + e + " of type "
+                    + rexLiteral.getTypeName() + " to TimestampString");
+          }
+          return dateFormatter.format(timestampString.getMillisSinceEpoch());
+        }
       case FLOOR:
       case EXTRACT:
         final RexCall call = (RexCall) e;
@@ -1200,18 +1233,29 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
         } else {
           throw new AssertionError("it is not a valid comparison: " + e);
         }
+        RexNode posRefNode = call.getOperands().get(posRef);
         final boolean numeric =
             call.getOperands().get(posRef).getType().getFamily()
                 == SqlTypeFamily.NUMERIC;
-        final Granularity granularity = 
DruidDateTimeUtils.extractGranularity(call.getOperands()
-            .get(posRef));
+        boolean formatDateString = false;
+        final Granularity granularity = 
DruidDateTimeUtils.extractGranularity(posRefNode);
         // in case no extraction the field will be omitted from the 
serialization
         ExtractionFunction extractionFunction = null;
         if (granularity != null) {
-          extractionFunction =
-              TimeExtractionFunction.createExtractFromGranularity(granularity, 
timeZone);
+          switch (posRefNode.getKind()) {
+          case EXTRACT:
+            extractionFunction =
+                
TimeExtractionFunction.createExtractFromGranularity(granularity, timeZone);
+            break;
+          case FLOOR:
+            extractionFunction =
+                TimeExtractionFunction.createFloorFromGranularity(granularity, 
timeZone);
+            formatDateString = true;
+            break;
+
+          }
         }
-        String dimName = tr(e, posRef);
+        String dimName = tr(e, posRef, formatDateString);
         if 
(dimName.equals(DruidConnectionImpl.DEFAULT_RESPONSE_TIMESTAMP_COLUMN)) {
           // We need to use Druid default column name to refer to the time 
dimension in a filter
           dimName = DruidTable.DEFAULT_TIMESTAMP_COLUMN;
@@ -1223,17 +1267,18 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
           // we have guarantees about the format of the output and thus we can 
apply the
           // normal selector
           if (numeric && extractionFunction == null) {
-            String constantValue = tr(e, posConstant);
+            String constantValue = tr(e, posConstant, formatDateString);
             return new JsonBound(dimName, constantValue, false, constantValue, 
false,
                 numeric, extractionFunction);
           }
-          return new JsonSelector(dimName, tr(e, posConstant), 
extractionFunction);
+          return new JsonSelector(dimName, tr(e, posConstant, 
formatDateString),
+              extractionFunction);
         case NOT_EQUALS:
           // extractionFunction should be null because if we are using an 
extraction function
           // we have guarantees about the format of the output and thus we can 
apply the
           // normal selector
           if (numeric && extractionFunction == null) {
-            String constantValue = tr(e, posConstant);
+            String constantValue = tr(e, posConstant, formatDateString);
             return new JsonCompositeFilter(JsonFilter.Type.OR,
                 new JsonBound(dimName, constantValue, true, null, false,
                     numeric, extractionFunction),
@@ -1241,30 +1286,30 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
                     numeric, extractionFunction));
           }
           return new JsonCompositeFilter(JsonFilter.Type.NOT,
-              new JsonSelector(dimName, tr(e, posConstant), 
extractionFunction));
+              new JsonSelector(dimName, tr(e, posConstant, formatDateString), 
extractionFunction));
         case GREATER_THAN:
-          return new JsonBound(dimName, tr(e, posConstant),
+          return new JsonBound(dimName, tr(e, posConstant, formatDateString),
               true, null, false, numeric, extractionFunction);
         case GREATER_THAN_OR_EQUAL:
-          return new JsonBound(dimName, tr(e, posConstant),
+          return new JsonBound(dimName, tr(e, posConstant, formatDateString),
               false, null, false, numeric, extractionFunction);
         case LESS_THAN:
           return new JsonBound(dimName, null, false,
-              tr(e, posConstant), true, numeric, extractionFunction);
+              tr(e, posConstant, formatDateString), true, numeric, 
extractionFunction);
         case LESS_THAN_OR_EQUAL:
           return new JsonBound(dimName, null, false,
-              tr(e, posConstant), false, numeric, extractionFunction);
+              tr(e, posConstant, formatDateString), false, numeric, 
extractionFunction);
         case IN:
           ImmutableList.Builder<String> listBuilder = ImmutableList.builder();
           for (RexNode rexNode: call.getOperands()) {
             if (rexNode.getKind() == SqlKind.LITERAL) {
-              listBuilder.add(((RexLiteral) rexNode).getValue3().toString());
+              listBuilder.add(Objects.toString(((RexLiteral) 
rexNode).getValue3()));
             }
           }
           return new JsonInFilter(dimName, listBuilder.build(), 
extractionFunction);
         case BETWEEN:
-          return new JsonBound(dimName, tr(e, 2), false,
-              tr(e, 3), false, numeric, extractionFunction);
+          return new JsonBound(dimName, tr(e, 2, formatDateString), false,
+              tr(e, 3, formatDateString), false, numeric, extractionFunction);
         case IS_NULL:
           return new JsonSelector(dimName, null, extractionFunction);
         case IS_NOT_NULL:
@@ -1284,12 +1329,12 @@ public class DruidQuery extends AbstractRelNode 
implements BindableRel {
       }
     }
 
-    private String tr(RexNode call, int index) {
-      return tr(call, index, false);
+    private String tr(RexNode call, int index, boolean formatDateString) {
+      return tr(call, index, false, formatDateString);
     }
 
-    private String tr(RexNode call, int index, boolean set) {
-      return translate(((RexCall) call).getOperands().get(index), set);
+    private String tr(RexNode call, int index, boolean set, boolean 
formatDateString) {
+      return translate(((RexCall) call).getOperands().get(index), set, 
formatDateString);
     }
 
     private List<JsonFilter> translateFilters(List<RexNode> operands) {

http://git-wip-us.apache.org/repos/asf/calcite/blob/ac5cce70/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
----------------------------------------------------------------------
diff --git 
a/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
 
b/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
index b1f8870..ba4d9d7 100644
--- 
a/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
+++ 
b/druid/src/main/java/org/apache/calcite/adapter/druid/TimeExtractionFunction.java
@@ -47,7 +47,8 @@ public class TimeExtractionFunction implements 
ExtractionFunction {
       TimeUnitRange.DAY,
       TimeUnitRange.WEEK);
 
-  private static final String ISO_TIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+  public static final String ISO_TIME_FORMAT = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
+
   private final String format;
   private final String granularity;
   private final String timeZone;
@@ -132,6 +133,24 @@ public class TimeExtractionFunction implements 
ExtractionFunction {
     final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();
     return timeUnit != null && VALID_TIME_EXTRACT.contains(timeUnit);
   }
+
+  /**
+   * Returns whether the RexCall contains a valid FLOOR unit that we can
+   * serialize to Druid.
+   *
+   * @param call Floor expression
+   *
+   * @return true if the floor unit is valid
+   */
+  public static boolean isValidTimeFloor(RexCall call) {
+    if (call.getKind() != SqlKind.FLOOR) {
+      return false;
+    }
+    final RexLiteral flag = (RexLiteral) call.operands.get(1);
+    final TimeUnitRange timeUnit = (TimeUnitRange) flag.getValue();
+    return timeUnit != null && VALID_TIME_EXTRACT.contains(timeUnit);
+  }
+
 }
 
 // End TimeExtractionFunction.java

http://git-wip-us.apache.org/repos/asf/calcite/blob/ac5cce70/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
----------------------------------------------------------------------
diff --git a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java 
b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
index 913e30c..0b9dc4a 100644
--- a/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
+++ b/druid/src/test/java/org/apache/calcite/test/DruidAdapterIT.java
@@ -1429,18 +1429,18 @@ public class DruidAdapterIT {
   @Test public void testFieldBasedCostColumnPruning() {
     // A query where filter cannot be pushed to Druid but
     // the project can still be pushed in order to prune extra columns.
-    String sql = "select \"countryName\", floor(CAST(\"time\" AS TIMESTAMP) to 
DAY),\n"
+    String sql = "select \"countryName\", ceil(CAST(\"time\" AS TIMESTAMP) to 
DAY),\n"
         + "  cast(count(*) as integer) as c\n"
         + "from \"wiki\"\n"
-        + "where floor(\"time\" to DAY) >= '1997-01-01 00:00:00 UTC'\n"
-        + "and floor(\"time\" to DAY) < '1997-09-01 00:00:00 UTC'\n"
-        + "group by \"countryName\", floor(CAST(\"time\" AS TIMESTAMP) TO 
DAY)\n"
+        + "where ceil(\"time\" to DAY) >= '1997-01-01 00:00:00 UTC'\n"
+        + "and ceil(\"time\" to DAY) < '1997-09-01 00:00:00 UTC'\n"
+        + "group by \"countryName\", ceil(CAST(\"time\" AS TIMESTAMP) TO 
DAY)\n"
         + "order by c limit 5";
     String plan = "BindableProject(countryName=[$0], EXPR$1=[$1], 
C=[CAST($2):INTEGER NOT NULL])\n"
         + "    BindableSort(sort0=[$2], dir0=[ASC], fetch=[5])\n"
         + "      BindableAggregate(group=[{0, 1}], agg#0=[COUNT()])\n"
-        + "        BindableProject(countryName=[$1], 
EXPR$1=[FLOOR(CAST($0):TIMESTAMP(0) NOT NULL, FLAG(DAY))])\n"
-        + "          BindableFilter(condition=[AND(>=(FLOOR($0, FLAG(DAY)), 
1997-01-01 00:00:00), <(FLOOR($0, FLAG(DAY)), 1997-09-01 00:00:00))])\n"
+        + "        BindableProject(countryName=[$1], 
EXPR$1=[CEIL(CAST($0):TIMESTAMP(0) NOT NULL, FLAG(DAY))])\n"
+        + "          BindableFilter(condition=[AND(>=(CEIL($0, FLAG(DAY)), 
1997-01-01 00:00:00), <(CEIL($0, FLAG(DAY)), 1997-09-01 00:00:00))])\n"
         + "            DruidQuery(table=[[wiki, wiki]], 
intervals=[[1900-01-09T00:00:00.000Z/2992-01-10T00:00:00.000Z]], projects=[[$0, 
$5]])";
     // NOTE: Druid query only has countryName as the dimension
     // being queried after project is pushed to druid query.
@@ -3380,6 +3380,111 @@ public class DruidAdapterIT {
         .returnsUnordered("C=86829");
   }
 
+  @Test
+  public void testFilterWithFloorOnTime() {
+    // Test filter on floor on time column is pushed to druid
+    final String sql =
+        "Select cast(floor(\"timestamp\" to MONTH) as timestamp) as t from 
\"foodmart\" where "
+            + "floor(\"timestamp\" to MONTH) between '1997-01-01 00:00:00 UTC'"
+            + "and '1997-03-01 00:00:00 UTC' order by t limit 2";
+
+    final String druidQueryPart1 = "\"filter\":{\"type\":\"and\",\"fields\":"
+        + 
"[{\"type\":\"bound\",\"dimension\":\"__time\",\"lower\":\"1997-01-01T00:00:00.000Z\","
+        + "\"lowerStrict\":false,\"ordering\":\"lexicographic\","
+        + "\"extractionFn\":{\"type\":\"timeFormat\",\"format\":\"yyyy-MM-dd";
+    final String druidQueryPart2 = "HH:mm:ss.SSS";
+    final String druidQueryPart3 = 
",\"granularity\":\"month\",\"timeZone\":\"UTC\","
+        + "\"locale\":\"en-US\"}},{\"type\":\"bound\",\"dimension\":\"__time\""
+        + ",\"upper\":\"1997-03-01T00:00:00.000Z\",\"upperStrict\":false,"
+        + 
"\"ordering\":\"lexicographic\",\"extractionFn\":{\"type\":\"timeFormat\"";
+    final String druidQueryPart4 = 
"\"columns\":[\"__time\"],\"granularity\":\"all\"";
+
+    sql(sql, FOODMART)
+        .queryContains(
+            druidChecker(druidQueryPart1, druidQueryPart2, druidQueryPart3, 
druidQueryPart4))
+        .returnsOrdered("T=1997-01-01 00:00:00", "T=1997-01-01 00:00:00");
+  }
+
+  @Test
+  public void testSelectFloorOnTimeWithFilterOnFloorOnTime() {
+    final String sql = "Select cast(floor(\"timestamp\" to MONTH) as 
timestamp) as t from "
+        + "\"foodmart\" where floor(\"timestamp\" to MONTH) >= '1997-05-01 
00:00:00 UTC' order by t"
+        + " limit 1";
+    final String druidQueryPart1 = 
"filter\":{\"type\":\"bound\",\"dimension\":\"__time\","
+        + "\"lower\":\"1997-05-01T00:00:00.000Z\",\"lowerStrict\":false,"
+        + 
"\"ordering\":\"lexicographic\",\"extractionFn\":{\"type\":\"timeFormat\","
+        + "\"format\":\"yyyy-MM-dd";
+    final String druidQueryPart2 = 
"\"granularity\":\"month\",\"timeZone\":\"UTC\","
+        + 
"\"locale\":\"en-US\"}},\"columns\":[\"__time\"],\"granularity\":\"all\"";
+
+    sql(sql, FOODMART).queryContains(druidChecker(druidQueryPart1, 
druidQueryPart2))
+        .returnsOrdered("T=1997-05-01 00:00:00");
+  }
+
+  @Test
+  public void testTimeWithFilterOnFloorOnTimeAndCastToTimestamp() {
+    final String sql = "Select cast(floor(\"timestamp\" to MONTH) as 
timestamp) as t from "
+        + "\"foodmart\" where floor(\"timestamp\" to MONTH) >= 
cast('1997-05-01 00:00:00' as TIMESTAMP) order by t"
+        + " limit 1";
+    final String druidQueryPart1 = 
"filter\":{\"type\":\"bound\",\"dimension\":\"__time\","
+        + "\"lower\":\"1997-05-01T00:00:00.000Z\",\"lowerStrict\":false,"
+        + 
"\"ordering\":\"lexicographic\",\"extractionFn\":{\"type\":\"timeFormat\","
+        + "\"format\":\"yyyy-MM-dd";
+    final String druidQueryPart2 = 
"\"granularity\":\"month\",\"timeZone\":\"UTC\","
+        + 
"\"locale\":\"en-US\"}},\"columns\":[\"__time\"],\"granularity\":\"all\"";
+
+    sql(sql, FOODMART).queryContains(druidChecker(druidQueryPart1, 
druidQueryPart2))
+        .returnsOrdered("T=1997-05-01 00:00:00");
+  }
+
+  @Test
+  public void testTimeWithFilterOnFloorOnTimeWithTimezone() {
+    final String sql = "Select cast(floor(\"timestamp\" to MONTH) as 
timestamp) as t from "
+        + "\"foodmart\" where floor(\"timestamp\" to MONTH) >= 
cast('1997-05-01 00:00:00'"
+        + " as TIMESTAMP) order by t limit 1";
+    final String druidQueryPart1 = 
"filter\":{\"type\":\"bound\",\"dimension\":\"__time\","
+        + "\"lower\":\"1997-05-01T00:00:00.000Z\",\"lowerStrict\":false,"
+        + 
"\"ordering\":\"lexicographic\",\"extractionFn\":{\"type\":\"timeFormat\","
+        + "\"format\":\"yyyy-MM-dd";
+    final String druidQueryPart2 = 
"\"granularity\":\"month\",\"timeZone\":\"IST\","
+        + 
"\"locale\":\"en-US\"}},\"columns\":[\"__time\"],\"granularity\":\"all\"";
+
+    CalciteAssert.that()
+        .enable(enabled())
+        .with(ImmutableMap.of("model", FOODMART.getPath()))
+        .with(CalciteConnectionProperty.TIME_ZONE.camelName(), "IST")
+        .query(sql)
+        .runs()
+        .queryContains(druidChecker(druidQueryPart1, druidQueryPart2))
+        // NOTE: this return value is not as expected
+        // see https://issues.apache.org/jira/browse/CALCITE-2107
+        .returnsOrdered("T=1997-05-01 05:30:00");
+  }
+
+  @Test
+  public void testTimeWithFilterOnFloorOnTimeWithTimezoneConversion() {
+    final String sql = "Select cast(floor(\"timestamp\" to MONTH) as 
timestamp) as t from "
+        + "\"foodmart\" where floor(\"timestamp\" to MONTH) >= '1997-04-30 
18:30:00 UTC' order by t"
+        + " limit 1";
+    final String druidQueryPart1 = 
"filter\":{\"type\":\"bound\",\"dimension\":\"__time\","
+        + "\"lower\":\"1997-05-01T00:00:00.000Z\",\"lowerStrict\":false,"
+        + 
"\"ordering\":\"lexicographic\",\"extractionFn\":{\"type\":\"timeFormat\","
+        + "\"format\":\"yyyy-MM-dd";
+    final String druidQueryPart2 = 
"\"granularity\":\"month\",\"timeZone\":\"IST\","
+        + 
"\"locale\":\"en-US\"}},\"columns\":[\"__time\"],\"granularity\":\"all\"";
+
+    CalciteAssert.that()
+        .enable(enabled())
+        .with(ImmutableMap.of("model", FOODMART.getPath()))
+        .with(CalciteConnectionProperty.TIME_ZONE.camelName(), "IST")
+        .query(sql)
+        .runs()
+        .queryContains(druidChecker(druidQueryPart1, druidQueryPart2))
+        // NOTE: this return value is not as expected
+        // see https://issues.apache.org/jira/browse/CALCITE-2107
+        .returnsOrdered("T=1997-05-01 05:30:00");
+  }
+
 }
 
 // End DruidAdapterIT.java

Reply via email to