This is an automated email from the ASF dual-hosted git repository.
mbutrovich pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 10d0c8d21 feat: add support for next_day expression (#3148)
10d0c8d21 is described below
commit 10d0c8d2101b2eb0362dde1aaee53ba14d5dfd1d
Author: Andy Grove <[email protected]>
AuthorDate: Wed Feb 11 08:15:49 2026 -0700
feat: add support for next_day expression (#3148)
* feat: add support for next_day expression
Adds native Comet support for Spark's next_day function, which
returns the first date strictly after a given date that falls on
the specified day of the week.
Supports full day names (Sunday, Monday, etc.) and abbreviations
(Sun, Mon, etc.).
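For example (illustrative only; 2023-01-01 falls on a Sunday, and the
result is always strictly after the input date):

    SELECT next_day(date('2023-01-01'), 'Monday');  -- 2023-01-02
    SELECT next_day(date('2023-01-01'), 'Sun');     -- 2023-01-08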
Closes #3092
Co-Authored-By: Claude Opus 4.5 <[email protected]>
* docs
* test: migrate next_day tests to SQL file-based approach
Co-Authored-By: Claude Opus 4.6 <[email protected]>
* refactor: use next_day implementation from datafusion-spark crate
Replace our custom SparkNextDay with the upstream datafusion-spark
version, which also handles LargeUtf8 and Utf8View string types.
Co-Authored-By: Claude Opus 4.6 <[email protected]>
---------
Co-authored-by: Claude Opus 4.5 <[email protected]>
---
native/core/src/execution/jni_api.rs | 2 +
.../org/apache/comet/serde/QueryPlanSerde.scala | 1 +
.../scala/org/apache/comet/serde/datetime.scala | 4 +-
.../sql-tests/expressions/datetime/next_day.sql | 76 ++++++++++++++++++++++
4 files changed, 82 insertions(+), 1 deletion(-)
diff --git a/native/core/src/execution/jni_api.rs b/native/core/src/execution/jni_api.rs
index 146e0feb8..b1e48828f 100644
--- a/native/core/src/execution/jni_api.rs
+++ b/native/core/src/execution/jni_api.rs
@@ -45,6 +45,7 @@ use datafusion_spark::function::bitwise::bitwise_not::SparkBitwiseNot;
use datafusion_spark::function::datetime::date_add::SparkDateAdd;
use datafusion_spark::function::datetime::date_sub::SparkDateSub;
use datafusion_spark::function::datetime::last_day::SparkLastDay;
+use datafusion_spark::function::datetime::next_day::SparkNextDay;
use datafusion_spark::function::hash::sha1::SparkSha1;
use datafusion_spark::function::hash::sha2::SparkSha2;
use datafusion_spark::function::map::map_from_entries::MapFromEntries;
@@ -349,6 +350,7 @@ fn register_datafusion_spark_function(session_ctx: &SessionContext) {
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkDateAdd::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkDateSub::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkLastDay::default()));
+session_ctx.register_udf(ScalarUDF::new_from_impl(SparkNextDay::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkSha1::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkConcat::default()));
session_ctx.register_udf(ScalarUDF::new_from_impl(SparkBitwiseNot::default()));
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index 60c0dbdc0..960aff870 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -199,6 +199,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
classOf[Hour] -> CometHour,
classOf[MakeDate] -> CometMakeDate,
classOf[Minute] -> CometMinute,
+ classOf[NextDay] -> CometNextDay,
classOf[Second] -> CometSecond,
classOf[TruncDate] -> CometTruncDate,
classOf[TruncTimestamp] -> CometTruncTimestamp,
diff --git a/spark/src/main/scala/org/apache/comet/serde/datetime.scala b/spark/src/main/scala/org/apache/comet/serde/datetime.scala
index c2ddb0317..d36b6a3b4 100644
--- a/spark/src/main/scala/org/apache/comet/serde/datetime.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/datetime.scala
@@ -21,7 +21,7 @@ package org.apache.comet.serde
import java.util.Locale
-import org.apache.spark.sql.catalyst.expressions.{Attribute, DateAdd, DateDiff, DateFormatClass, DateSub, DayOfMonth, DayOfWeek, DayOfYear, GetDateField, Hour, LastDay, Literal, MakeDate, Minute, Month, Quarter, Second, TruncDate, TruncTimestamp, UnixDate, UnixTimestamp, WeekDay, WeekOfYear, Year}
+import org.apache.spark.sql.catalyst.expressions.{Attribute, DateAdd, DateDiff, DateFormatClass, DateSub, DayOfMonth, DayOfWeek, DayOfYear, GetDateField, Hour, LastDay, Literal, MakeDate, Minute, Month, NextDay, Quarter, Second, TruncDate, TruncTimestamp, UnixDate, UnixTimestamp, WeekDay, WeekOfYear, Year}
import org.apache.spark.sql.types.{DateType, IntegerType, StringType, TimestampType}
import org.apache.spark.unsafe.types.UTF8String
@@ -310,6 +310,8 @@ object CometDateAdd extends CometScalarFunction[DateAdd]("date_add")
object CometDateSub extends CometScalarFunction[DateSub]("date_sub")
+object CometNextDay extends CometScalarFunction[NextDay]("next_day")
+
object CometMakeDate extends CometScalarFunction[MakeDate]("make_date")
object CometLastDay extends CometScalarFunction[LastDay]("last_day")
diff --git a/spark/src/test/resources/sql-tests/expressions/datetime/next_day.sql b/spark/src/test/resources/sql-tests/expressions/datetime/next_day.sql
new file mode 100644
index 000000000..0effb36aa
--- /dev/null
+++ b/spark/src/test/resources/sql-tests/expressions/datetime/next_day.sql
@@ -0,0 +1,76 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements. See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership. The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied. See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+-- ConfigMatrix: parquet.enable.dictionary=false,true
+
+statement
+CREATE TABLE test_next_day(d date) USING parquet
+
+statement
+INSERT INTO test_next_day VALUES (date('2023-01-01')), (date('2024-02-29')), (date('1969-12-31')), (date('2024-06-15')), (NULL)
+
+-- full day names
+query
+SELECT next_day(d, 'Sunday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Monday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Tuesday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Wednesday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Thursday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Friday') FROM test_next_day
+
+query
+SELECT next_day(d, 'Saturday') FROM test_next_day
+
+-- abbreviated day names
+query
+SELECT next_day(d, 'Sun') FROM test_next_day
+
+query
+SELECT next_day(d, 'Mon') FROM test_next_day
+
+query
+SELECT next_day(d, 'Tue') FROM test_next_day
+
+query
+SELECT next_day(d, 'Wed') FROM test_next_day
+
+query
+SELECT next_day(d, 'Thu') FROM test_next_day
+
+query
+SELECT next_day(d, 'Fri') FROM test_next_day
+
+query
+SELECT next_day(d, 'Sat') FROM test_next_day
+
+-- literal arguments
+query
+SELECT next_day(date('2023-01-01'), 'Monday'), next_day(date('2023-01-01'), 'Sunday')
+
+-- null handling
+query
+SELECT next_day(NULL, 'Monday'), next_day(date('2023-01-01'), NULL)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]