This is an automated email from the ASF dual-hosted git repository.
philo pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new a0b7a2c236 [GLUTEN-10170][VL] Offload `try` arithmetic functions regardless of ANSI configuration (#10267)
a0b7a2c236 is described below
commit a0b7a2c2365c59574f5e8288dc4f9118aec9ed3b
Author: nimesh1601 <[email protected]>
AuthorDate: Wed Aug 20 19:25:50 2025 +0530
[GLUTEN-10170][VL] Offload `try` arithmetic functions regardless of ANSI configuration (#10267)
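
The rationale: the try_* arithmetic variants are defined to return NULL on
runtime failure, so their observable result does not depend on
spark.sql.ansi.enabled, and the ANSI-mode fallback guard removed below was
overly conservative. A minimal illustration, assuming a local SparkSession
named `spark` (illustration only, not part of this commit):

    // With ANSI on, plain int arithmetic throws on overflow...
    spark.conf.set("spark.sql.ansi.enabled", "true")
    spark.sql("SELECT 2147483647 + 1").show()         // ARITHMETIC_OVERFLOW
    // ...but the try_ variant still returns NULL, ANSI on or off.
    spark.sql("SELECT try_add(2147483647, 1)").show() // NULL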
---
.../backendsapi/velox/VeloxSparkPlanExecApi.scala | 4 --
.../functions/MathFunctionsValidateSuite.scala | 44 ++++++++++++++++++++++
2 files changed, 44 insertions(+), 4 deletions(-)
diff --git a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxSparkPlanExecApi.scala b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxSparkPlanExecApi.scala
index 816b6d5a66..aaa6836f14 100644
--- a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxSparkPlanExecApi.scala
+++ b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxSparkPlanExecApi.scala
@@ -117,10 +117,6 @@ class VeloxSparkPlanExecApi extends SparkPlanExecApi {
right: ExpressionTransformer,
original: TryEval,
checkArithmeticExprName: String): ExpressionTransformer = {
- if (SparkShimLoader.getSparkShims.withAnsiEvalMode(original.child)) {
- throw new GlutenNotSupportException(
- s"${original.child.prettyName} with ansi mode is not supported")
- }
original.child.dataType match {
case LongType | IntegerType | ShortType | ByteType =>
case _ => throw new GlutenNotSupportException(s"$substraitExprName is not supported")
diff --git a/backends-velox/src/test/scala/org/apache/gluten/functions/MathFunctionsValidateSuite.scala b/backends-velox/src/test/scala/org/apache/gluten/functions/MathFunctionsValidateSuite.scala
index ba7d98991e..6657d3d99f 100644
--- a/backends-velox/src/test/scala/org/apache/gluten/functions/MathFunctionsValidateSuite.scala
+++ b/backends-velox/src/test/scala/org/apache/gluten/functions/MathFunctionsValidateSuite.scala
@@ -21,6 +21,7 @@ import org.apache.gluten.execution.{BatchScanExecTransformer, ProjectExecTransfo
import org.apache.spark.SparkConf
import org.apache.spark.sql.Row
+import org.apache.spark.sql.internal.SQLConf
class MathFunctionsValidateSuiteRasOff extends MathFunctionsValidateSuite {
override protected def sparkConf: SparkConf = {
@@ -36,6 +37,49 @@ class MathFunctionsValidateSuiteRasOn extends MathFunctionsValidateSuite {
}
}
+class MathFunctionsValidateSuiteAnsiOn extends FunctionsValidateSuite {
+
+ override protected def sparkConf: SparkConf = {
+ super.sparkConf
+ .set(SQLConf.ANSI_ENABLED.key, "true")
+ .set(GlutenConfig.GLUTEN_ANSI_FALLBACK_ENABLED.key, "false")
+ }
+
+ disableFallbackCheck
+
+ test("try_add") {
+ runQueryAndCompare(
+ "select try_add(cast(l_orderkey as int), 1), try_add(cast(l_orderkey as
int), 2147483647)" +
+ " from lineitem") {
+ checkGlutenOperatorMatch[ProjectExecTransformer]
+ }
+ }
+
+ test("try_divide") {
+ runQueryAndCompare(
+ "select try_divide(cast(l_orderkey as int), 0) from lineitem",
+ noFallBack = false) {
+ _ => // Spark would always cast inputs to double for this function.
+ }
+ }
+
+ testWithMinSparkVersion("try_multiply", "3.3") {
+ runQueryAndCompare(
+ "select try_multiply(2147483647, cast(l_orderkey as int)), " +
+ "try_multiply(-2147483648, cast(l_orderkey as int)) from lineitem") {
+ checkGlutenOperatorMatch[ProjectExecTransformer]
+ }
+ }
+
+ testWithMinSparkVersion("try_subtract", "3.3") {
+ runQueryAndCompare(
+ "select try_subtract(2147483647, cast(l_orderkey as int)), " +
+ "try_subtract(-2147483648, cast(l_orderkey as int)) from lineitem") {
+ checkGlutenOperatorMatch[ProjectExecTransformer]
+ }
+ }
+}
+
abstract class MathFunctionsValidateSuite extends FunctionsValidateSuite {
disableFallbackCheck
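
A note on the helper used in the new tests: checkGlutenOperatorMatch[ProjectExecTransformer]
asserts that the offloaded operator actually shows up in the executed plan,
i.e. the projection did not fall back to vanilla Spark's ProjectExec.
Conceptually it amounts to something like this sketch (helper name
hypothetical, not the suite's implementation):

    import org.apache.spark.sql.DataFrame
    import org.apache.spark.sql.execution.SparkPlan
    import scala.reflect.ClassTag

    // Assert that the executed plan contains at least one node of type T,
    // e.g. assertPlanContains[ProjectExecTransformer](df).
    def assertPlanContains[T <: SparkPlan : ClassTag](df: DataFrame): Unit = {
      val hits = df.queryExecution.executedPlan.collect { case p: T => p }
      assert(hits.nonEmpty,
        s"expected ${implicitly[ClassTag[T]].runtimeClass.getSimpleName} in plan")
    }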
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]