jonvex commented on code in PR #13711:
URL: https://github.com/apache/hudi/pull/13711#discussion_r2354122882


##########
hudi-spark-datasource/hudi-spark/src/test/scala/org/apache/spark/sql/hudi/feature/index/TestExpressionIndex.scala:
##########
@@ -1548,28 +1550,28 @@ class TestExpressionIndex extends 
HoodieSparkSqlTestBase {
         spark.sql(s"create index idx_to_timestamp_default on $tableName using 
column_stats(date) options(expr='to_timestamp')")
         metaClient = HoodieTableMetaClient.reload(metaClient)
         val toTimestampDefault = resolveExpr(spark, 
unapply(functions.to_timestamp(functions.col("date"))).get, tableSchema)
-        dataFilter = EqualTo(toTimestampDefault, lit(1732924800000000L).expr)
+        dataFilter = EqualTo(toTimestampDefault, 
lit(SparkValueMetadata.convertJavaTypeToSparkType(ValueType.toTimestampMicros(1732924800000000L,
 null), false)).expr)
         verifyFilePruning(opts, dataFilter, metaClient, isDataSkippingExpected 
= true)
         spark.sql(s"drop index idx_to_timestamp_default on $tableName")
 
         spark.sql(s"create index idx_to_timestamp on $tableName using 
column_stats(date) options(expr='to_timestamp', format='yyyy-MM-dd')")
         metaClient = HoodieTableMetaClient.reload(metaClient)
         val toTimestamp = resolveExpr(spark, 
unapply(functions.to_timestamp(functions.col("date"), "yyyy-MM-dd")).get, 
tableSchema)
-        dataFilter = EqualTo(toTimestamp, lit(1732924800000000L).expr)
+        dataFilter = EqualTo(toTimestamp, 
lit(SparkValueMetadata.convertJavaTypeToSparkType(ValueType.toTimestampMicros(1732924800000000L,
 null), false)).expr)
         verifyFilePruning(opts, dataFilter, metaClient, isDataSkippingExpected 
= true)
         spark.sql(s"drop index idx_to_timestamp on $tableName")
 
         spark.sql(s"create index idx_date_add on $tableName using 
column_stats(date) options(expr='date_add', days='10')")
         metaClient = HoodieTableMetaClient.reload(metaClient)
         val dateAdd = resolveExpr(spark, 
unapply(functions.date_add(functions.col("date"), 10)).get, tableSchema)
-        dataFilter = EqualTo(dateAdd, lit(18606).expr)
+        dataFilter = EqualTo(dateAdd, 
lit(HoodieAvroUtils.toJavaDate(18606)).expr)
         verifyFilePruning(opts, dataFilter, metaClient, isDataSkippingExpected 
= true)
         spark.sql(s"drop index idx_date_add on $tableName")
 
         spark.sql(s"create index idx_date_sub on $tableName using 
column_stats(date) options(expr='date_sub', days='10')")
         metaClient = HoodieTableMetaClient.reload(metaClient)
         val dateSub = resolveExpr(spark, 
unapply(functions.date_sub(functions.col("date"), 10)).get, tableSchema)
-        dataFilter = EqualTo(dateSub, lit(18586).expr)
+        dataFilter = EqualTo(dateSub, 
lit(HoodieAvroUtils.toJavaDate(18586)).expr)

Review Comment:
   I think it was wrong before. If you look at `functions.date_add`, it says it 
returns a date, not a long. All the changes in this file are places where the 
return type didn't match, and I had to fix it.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to