This is an automated email from the ASF dual-hosted git repository.

yuanzhou pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new ba01f8b71 [GLUTEN-5341] Fix and enable delta UTs for Spark3.5 (#5393)
ba01f8b71 is described below

commit ba01f8b7121324729c6ed5be4a36d9770791335e
Author: Yan Ma <[email protected]>
AuthorDate: Mon Apr 15 21:08:15 2024 +0800

    [GLUTEN-5341] Fix and enable delta UTs for Spark3.5 (#5393)
---
 .../gluten/execution/DeltaScanTransformer.scala    |  6 +++++-
 .../apache/gluten/execution/VeloxDeltaSuite.scala  | 24 +++++++---------------
 2 files changed, 12 insertions(+), 18 deletions(-)

diff --git a/gluten-delta/src/main/scala/org/apache/gluten/execution/DeltaScanTransformer.scala b/gluten-delta/src/main/scala/org/apache/gluten/execution/DeltaScanTransformer.scala
index 16b764146..9e97a3687 100644
--- a/gluten-delta/src/main/scala/org/apache/gluten/execution/DeltaScanTransformer.scala
+++ b/gluten-delta/src/main/scala/org/apache/gluten/execution/DeltaScanTransformer.scala
@@ -52,7 +52,11 @@ case class DeltaScanTransformer(
   override lazy val fileFormat: ReadFileFormat = ReadFileFormat.ParquetReadFormat
 
   override protected def doValidateInternal(): ValidationResult = {
-    if (requiredSchema.fields.exists(_.name == "__delta_internal_is_row_deleted")) {
+    if (
+      requiredSchema.fields.exists(
+        _.name == "__delta_internal_is_row_deleted") || requiredSchema.fields.exists(
+        _.name == "__delta_internal_row_index")
+    ) {
       return ValidationResult.notOk(s"Deletion vector is not supported in native.")
     }
 
diff --git a/gluten-delta/src/test/scala/org/apache/gluten/execution/VeloxDeltaSuite.scala b/gluten-delta/src/test/scala/org/apache/gluten/execution/VeloxDeltaSuite.scala
index 8cf4baae8..f90bff2af 100644
--- a/gluten-delta/src/test/scala/org/apache/gluten/execution/VeloxDeltaSuite.scala
+++ b/gluten-delta/src/test/scala/org/apache/gluten/execution/VeloxDeltaSuite.scala
@@ -42,8 +42,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
   }
 
   // IdMapping is supported in Delta 2.2 (related to Spark3.3.1)
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping mode = id", Some("3.3"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping mode = id", Some("3.3")) {
     withTable("delta_cm1") {
       spark.sql(s"""
                    |create table delta_cm1 (id int, name string) using delta
@@ -63,8 +62,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
   }
 
   // NameMapping is supported in Delta 2.0 (related to Spark3.2.0)
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping mode = name", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping mode = name", Some("3.2")) {
     withTable("delta_cm2") {
       spark.sql(s"""
                    |create table delta_cm2 (id int, name string) using delta
@@ -83,8 +81,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("delta: time travel", Some("3.3"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("delta: time travel", Some("3.3")) {
     withTable("delta_tm") {
       spark.sql(s"""
                    |create table delta_tm (id int, name string) using delta
@@ -109,8 +106,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("delta: partition filters", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("delta: partition filters", Some("3.2")) {
     withTable("delta_pf") {
       spark.sql(s"""
                    |create table delta_pf (id int, name string) using delta partitioned by (name)
@@ -129,11 +125,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion(
-    "basic test with stats.skipping disabled",
-    Some("3.2"),
-    Some("3.4")) {
+  testWithSpecifiedSparkVersion("basic test with stats.skipping disabled", Some("3.2")) {
     withTable("delta_test2") {
       withSQLConf("spark.databricks.delta.stats.skipping" -> "false") {
         spark.sql(s"""
@@ -153,8 +145,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5.
-  testWithSpecifiedSparkVersion("column mapping with complex type", Some("3.2"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("column mapping with complex type", Some("3.2")) {
     withTable("t1") {
       val simpleNestedSchema = new StructType()
         .add("a", StringType, true)
@@ -204,8 +195,7 @@ class VeloxDeltaSuite extends WholeStageTransformerSuite {
     }
   }
 
-  // Disable for Spark3.5
-  testWithSpecifiedSparkVersion("deletion vector", Some("3.4"), Some("3.4")) {
+  testWithSpecifiedSparkVersion("deletion vector", Some("3.4")) {
     withTempPath {
       p =>
         import testImplicits._


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to