This is an automated email from the ASF dual-hosted git repository.

xiangfu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git


The following commit(s) were added to refs/heads/master by this push:
     new 972b555cc5 fixing dateTimeConvert
972b555cc5 is described below

commit 972b555cc5609a88002ac2c4501ece247b51cebe
Author: Xiang Fu <[email protected]>
AuthorDate: Tue Nov 7 21:55:58 2023 -0800

    fixing dateTimeConvert
---
 .../common/function/TransformFunctionType.java     |  2 +-
 .../apache/pinot/common/utils/PinotDataType.java   |  8 ++--
 .../integration/tests/custom/TimestampTest.java    | 54 +++++++++++++++++++++-
 3 files changed, 59 insertions(+), 5 deletions(-)

diff --git a/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java b/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
index 2b0b249e3e..330890a2f9 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/function/TransformFunctionType.java
@@ -366,7 +366,7 @@ public enum TransformFunctionType {
     int outputFormatPos = 2;
     if (opBinding.getOperandCount() > outputFormatPos
         && opBinding.isOperandLiteral(outputFormatPos, false)) {
-      String outputFormatStr = opBinding.getOperandLiteralValue(outputFormatPos, String.class).toUpperCase();
+      String outputFormatStr = opBinding.getOperandLiteralValue(outputFormatPos, String.class);
       DateTimeFormatSpec dateTimeFormatSpec = new DateTimeFormatSpec(outputFormatStr);
       if ((dateTimeFormatSpec.getTimeFormat() == DateTimeFieldSpec.TimeFormat.EPOCH) || (
           dateTimeFormatSpec.getTimeFormat() == DateTimeFieldSpec.TimeFormat.TIMESTAMP)) {
diff --git a/pinot-common/src/main/java/org/apache/pinot/common/utils/PinotDataType.java b/pinot-common/src/main/java/org/apache/pinot/common/utils/PinotDataType.java
index 953a874cbe..179e1342dc 100644
--- a/pinot-common/src/main/java/org/apache/pinot/common/utils/PinotDataType.java
+++ b/pinot-common/src/main/java/org/apache/pinot/common/utils/PinotDataType.java
@@ -974,7 +974,7 @@ public enum PinotDataType {
         // String does not represent a well-formed JSON. Ignore this exception because we are going to try to convert
         // Java String object to JSON string.
       } catch (Exception e) {
-          throw new RuntimeException("Unable to convert String into JSON. Input value: " + value, e);
+        throw new RuntimeException("Unable to convert String into JSON. Input value: " + value, e);
       }
     }
 
@@ -1232,7 +1232,7 @@ public enum PinotDataType {
       return (boolean[]) value;
     }
     if (isSingleValue()) {
-      return new boolean[] {toBoolean(value)};
+      return new boolean[]{toBoolean(value)};
     } else {
       Object[] valueArray = toObjectArray(value);
       int length = valueArray.length;
@@ -1250,7 +1250,7 @@ public enum PinotDataType {
       return (Timestamp[]) value;
     }
     if (isSingleValue()) {
-      return new Timestamp[] {toTimestamp(value)};
+      return new Timestamp[]{toTimestamp(value)};
     } else {
       Object[] valueArray = toObjectArray(value);
       int length = valueArray.length;
@@ -1476,6 +1476,8 @@ public enum PinotDataType {
         return JSON;
       case BYTES:
         return BYTES;
+      case OBJECT:
+        return OBJECT;
       case INT_ARRAY:
         return PRIMITIVE_INT_ARRAY;
       case LONG_ARRAY:
diff --git a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/custom/TimestampTest.java b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/custom/TimestampTest.java
index 14818f514f..603a627555 100644
--- a/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/custom/TimestampTest.java
+++ b/pinot-integration-tests/src/test/java/org/apache/pinot/integration/tests/custom/TimestampTest.java
@@ -59,6 +59,13 @@ public class TimestampTest extends CustomDataQueryClusterIntegrationTest {
   private static final String LONG_ONE_MONTH_AFTER = "longOneMonthAfter";
   private static final String LONG_ONE_QUARTER_AFTER = "longOneQuarterAfter";
   private static final String LONG_ONE_YEAR_AFTER = "longOneYearAfter";
+  private static final String YYYY_MM_DD_BASE = "yyyyMMddBase";
+  private static final String YYYY_MM_DD_HALF_DAY_AFTER = "yyyyMMddHalfDayAfter";
+  private static final String YYYY_MM_DD_ONE_DAY_AFTER = "yyyyMMddOneDayAfter";
+  private static final String YYYY_MM_DD_ONE_WEEK_AFTER = "yyyyMMddOneWeekAfter";
+  private static final String YYYY_MM_DD_ONE_MONTH_AFTER = "yyyyMMddOneMonthAfter";
+  private static final String YYYY_MM_DD_ONE_QUARTER_AFTER = "yyyyMMddOneQuarterAfter";
+  private static final String YYYY_MM_DD_ONE_YEAR_AFTER = "yyyyMMddOneYearAfter";
 
   private static final TimeZone DEFAULT_TIME_ZONE = TimeZone.getDefault();
 
@@ -140,6 +147,36 @@ public class TimestampTest extends CustomDataQueryClusterIntegrationTest {
     assertEquals(jsonNode.get("resultTable").get("rows").get(0).get(0).asText(), "2019-01-01 00:00:00.0");
   }
 
+  @Test(dataProvider = "useBothQueryEngines")
+  public void testSelectWithStringCastAndFilterQueries(boolean useMultiStageQueryEngine)
+      throws Exception {
+    setUseMultiStageQueryEngine(useMultiStageQueryEngine);
+    String query = String.format("\n"
+        + "SELECT FromDateTime(DATETIMECONVERT(\"yyyyMMddBase\", '1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd', "
+        + "'1:DAYS:SIMPLE_DATE_FORMAT:yyyy-MM-dd HH:mm:ss.SSSZ tz(Europe/Amsterdam)', '1:DAYS'), 'yyyy-MM-dd "
+        + "HH:mm:ss.SSSZ') AS ts, COUNT(*) as cnt\n"
+        + "FROM %s\n"
+        + "GROUP BY \"yyyyMMddBase\"\n"
+        + "HAVING COUNT(*) > 0\n"
+        + "ORDER BY 1\n "
+        + "LIMIT 5", getTableName());
+    JsonNode jsonNode = postQuery(query);
+    assertEquals(jsonNode.get("resultTable").get("rows").size(), 5);
+    if (useMultiStageQueryEngine) {
+      assertEquals(jsonNode.get("resultTable").get("rows").get(0).get(0).asText(), "2018-12-31 23:00:00.0");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(1).get(0).asText(), "2019-01-01 23:00:00.0");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(2).get(0).asText(), "2019-01-02 23:00:00.0");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(3).get(0).asText(), "2019-01-03 23:00:00.0");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(4).get(0).asText(), "2019-01-04 23:00:00.0");
+    } else {
+      assertEquals(jsonNode.get("resultTable").get("rows").get(0).get(0).asText(), "1546297200000");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(1).get(0).asText(), "1546383600000");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(2).get(0).asText(), "1546470000000");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(3).get(0).asText(), "1546556400000");
+      assertEquals(jsonNode.get("resultTable").get("rows").get(4).get(0).asText(), "1546642800000");
+    }
+  }
+
   @Test(dataProvider = "useBothQueryEngines")
   public void testTimeExtractFunction(boolean useMultiStageQueryEngine)
       throws Exception {
@@ -416,6 +453,7 @@ public class TimestampTest extends CustomDataQueryClusterIntegrationTest {
         .addSingleValueDimension(LONG_ONE_MONTH_AFTER, FieldSpec.DataType.LONG)
         .addSingleValueDimension(LONG_ONE_QUARTER_AFTER, FieldSpec.DataType.LONG)
         .addSingleValueDimension(LONG_ONE_YEAR_AFTER, FieldSpec.DataType.LONG)
+        .addSingleValueDimension(YYYY_MM_DD_BASE, FieldSpec.DataType.STRING)
         .build();
   }
 
@@ -438,7 +476,14 @@ public class TimestampTest extends CustomDataQueryClusterIntegrationTest {
         new Field(LONG_ONE_WEEK_AFTER, create(Type.LONG), null, null),
         new Field(LONG_ONE_MONTH_AFTER, create(Type.LONG), null, null),
         new Field(LONG_ONE_QUARTER_AFTER, create(Type.LONG), null, null),
-        new Field(LONG_ONE_YEAR_AFTER, create(Type.LONG), null, null)
+        new Field(LONG_ONE_YEAR_AFTER, create(Type.LONG), null, null),
+        new Field(YYYY_MM_DD_BASE, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_HALF_DAY_AFTER, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_ONE_DAY_AFTER, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_ONE_WEEK_AFTER, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_ONE_MONTH_AFTER, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_ONE_QUARTER_AFTER, create(Type.STRING), null, null),
+        new Field(YYYY_MM_DD_ONE_YEAR_AFTER, create(Type.STRING), null, null)
     ));
 
     // create avro file
@@ -472,6 +517,13 @@ public class TimestampTest extends CustomDataQueryClusterIntegrationTest {
         record.put(LONG_ONE_MONTH_AFTER, tsOneMonthAfter);
         record.put(LONG_ONE_QUARTER_AFTER, tsOneQuarterAfter);
         record.put(LONG_ONE_YEAR_AFTER, tsOneYearAfter);
+        record.put(YYYY_MM_DD_BASE, DateTimeFunctions.toDateTime(tsBaseLong, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_HALF_DAY_AFTER, DateTimeFunctions.toDateTime(tsHalfDayAfter, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_ONE_DAY_AFTER, DateTimeFunctions.toDateTime(tsOneDayAfter, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_ONE_WEEK_AFTER, DateTimeFunctions.toDateTime(tsOneWeekAfter, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_ONE_MONTH_AFTER, DateTimeFunctions.toDateTime(tsOneMonthAfter, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_ONE_QUARTER_AFTER, DateTimeFunctions.toDateTime(tsOneQuarterAfter, "yyyy-MM-dd"));
+        record.put(YYYY_MM_DD_ONE_YEAR_AFTER, DateTimeFunctions.toDateTime(tsOneYearAfter, "yyyy-MM-dd"));
 
         // add avro record to file
         fileWriter.append(record);


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to