This is an automated email from the ASF dual-hosted git repository.
philo pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new dbbb4a7df [CORE] Fix fallback for spark sequence function with literal array data as input (#6433)
dbbb4a7df is described below
commit dbbb4a7dfad14f7dffa208d70cb3ea587c31633e
Author: 高阳阳 <[email protected]>
AuthorDate: Tue Jul 16 14:20:55 2024 +0800
[CORE] Fix fallback for spark sequence function with literal array data as input (#6433)
---
.../gluten/execution/ScalarFunctionsValidateSuite.scala | 10 ++++++++++
.../gluten/substrait/expression/ExpressionBuilder.java | 15 +++++++++++++++
2 files changed, 25 insertions(+)
diff --git a/backends-velox/src/test/scala/org/apache/gluten/execution/ScalarFunctionsValidateSuite.scala b/backends-velox/src/test/scala/org/apache/gluten/execution/ScalarFunctionsValidateSuite.scala
index 39c1b4560..3b9e24795 100644
--- a/backends-velox/src/test/scala/org/apache/gluten/execution/ScalarFunctionsValidateSuite.scala
+++ b/backends-velox/src/test/scala/org/apache/gluten/execution/ScalarFunctionsValidateSuite.scala
@@ -17,6 +17,7 @@
package org.apache.gluten.execution
import org.apache.spark.SparkException
+import org.apache.spark.sql.catalyst.optimizer.NullPropagation
import org.apache.spark.sql.execution.ProjectExec
import org.apache.spark.sql.types._
@@ -664,6 +665,15 @@ class ScalarFunctionsValidateSuite extends FunctionsValidateTest {
}
}
+ test("Test sequence function optimized by Spark constant folding") {
+ withSQLConf(("spark.sql.optimizer.excludedRules", NullPropagation.ruleName)) {
+ runQueryAndCompare("""SELECT sequence(1, 5), l_orderkey
+ | from lineitem limit 100""".stripMargin) {
+ checkGlutenOperatorMatch[ProjectExecTransformer]
+ }
+ }
+ }
+
test("Test raise_error, assert_true function") {
runQueryAndCompare("""SELECT assert_true(l_orderkey >= 1), l_orderkey
| from lineitem limit 100""".stripMargin) {
diff --git a/gluten-core/src/main/java/org/apache/gluten/substrait/expression/ExpressionBuilder.java b/gluten-core/src/main/java/org/apache/gluten/substrait/expression/ExpressionBuilder.java
index e322e1528..16ae5412e 100644
--- a/gluten-core/src/main/java/org/apache/gluten/substrait/expression/ExpressionBuilder.java
+++ b/gluten-core/src/main/java/org/apache/gluten/substrait/expression/ExpressionBuilder.java
@@ -23,7 +23,9 @@ import org.apache.gluten.substrait.type.*;
import org.apache.spark.sql.catalyst.InternalRow;
import org.apache.spark.sql.catalyst.expressions.Attribute;
import org.apache.spark.sql.catalyst.expressions.Expression;
+import org.apache.spark.sql.catalyst.expressions.UnsafeArrayData;
import org.apache.spark.sql.catalyst.util.ArrayData;
+import org.apache.spark.sql.catalyst.util.GenericArrayData;
import org.apache.spark.sql.catalyst.util.MapData;
import org.apache.spark.sql.types.*;
@@ -215,6 +217,19 @@ public class ExpressionBuilder {
public static LiteralNode makeLiteral(Object obj, DataType dataType, Boolean nullable) {
TypeNode typeNode = ConverterUtils.getTypeNode(dataType, nullable);
+ if (obj instanceof UnsafeArrayData) {
+ UnsafeArrayData oldObj = (UnsafeArrayData) obj;
+ int numElements = oldObj.numElements();
+ Object[] elements = new Object[numElements];
+ DataType elementType = ((ArrayType) dataType).elementType();
+
+ for (int i = 0; i < numElements; i++) {
+ elements[i] = oldObj.get(i, elementType);
+ }
+
+ GenericArrayData newObj = new GenericArrayData(elements);
+ return makeListLiteral(newObj, typeNode);
+ }
return makeLiteral(obj, typeNode);
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]