This is an automated email from the ASF dual-hosted git repository.

agrove pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new 236064df9 Add native_iceberg_compat tests to CI (#1487)
236064df9 is described below

commit 236064df96bce1fe56faa09c21de262a05f418c2
Author: Andy Grove <[email protected]>
AuthorDate: Mon Mar 10 05:40:49 2025 -0600

    Add native_iceberg_compat tests to CI (#1487)
---
 .github/workflows/pr_build.yml                                     | 7 ++++---
 spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala    | 3 +++
 .../src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala | 3 +++
 .../scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala | 6 ++++++
 4 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/pr_build.yml b/.github/workflows/pr_build.yml
index 23b684267..5418320f5 100644
--- a/.github/workflows/pr_build.yml
+++ b/.github/workflows/pr_build.yml
@@ -77,11 +77,10 @@ jobs:
           upload-test-reports: ${{ matrix.java_version == '17' }}
 
   linux-test-native-datafusion-scan:
-    env:
-      COMET_PARQUET_SCAN_IMPL: "native_datafusion"
     strategy:
       matrix:
         os: [ubuntu-latest]
+        scan_impl: ['native_datafusion', 'native_iceberg_compat']
         java_version: [17]
         test-target: [rust, java]
         spark-version: ['3.5']
@@ -89,8 +88,10 @@ jobs:
         is_push_event:
           - ${{ github.event_name == 'push' }}
       fail-fast: false
-    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}-native-datafusion
+    name: ${{ matrix.os }}/java ${{ matrix.java_version }}-spark-${{matrix.spark-version}}-scala-${{matrix.scala-version}}/${{ matrix.test-target }}-${{ matrix.scan_impl }}
     runs-on: ${{ matrix.os }}
+    env:
+      COMET_PARQUET_SCAN_IMPL: ${{ matrix.scan_impl }}
     container:
       image: amd64/rust
     steps:
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
index 781714983..c02c9ce5e 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
@@ -525,6 +525,9 @@ class CometExecSuite extends CometTestBase {
   }
 
   test("Comet native metrics: scan") {
+    // https://github.com/apache/datafusion-comet/issues/1441
+    assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_ICEBERG_COMPAT)
+
     withSQLConf(CometConf.COMET_EXEC_ENABLED.key -> "true") {
       withTempDir { dir =>
         val path = new Path(dir.toURI.toString, "native-scan.parquet")
diff --git a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
index f6a57b12e..4099759cd 100644
--- a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
@@ -1028,6 +1028,9 @@ abstract class ParquetReadSuite extends CometTestBase {
   }
 
   test("scan metrics") {
+    // https://github.com/apache/datafusion-comet/issues/1441
+    assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_ICEBERG_COMPAT)
+
     val cometScanMetricNames = Seq(
       "ParquetRowGroups",
       "ParquetNativeDecodeTime",
diff --git a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
index 76df1e96c..5f3c5c3ed 100644
--- a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
@@ -49,6 +49,9 @@ class ParquetEncryptionITCase extends QueryTest with SQLTestUtils {
  private val key2 = encoder.encodeToString("1234567890123451".getBytes(StandardCharsets.UTF_8))
 
   test("SPARK-34990: Write and read an encrypted parquet") {
+    // https://github.com/apache/datafusion-comet/issues/1488
+    assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_ICEBERG_COMPAT)
+
     import testImplicits._
 
     Seq("org.apache.parquet.crypto.keytools.PropertiesDrivenCryptoFactory").foreach {
@@ -85,6 +88,9 @@ class ParquetEncryptionITCase extends QueryTest with SQLTestUtils {
   }
 
   test("SPARK-37117: Can't read files in Parquet encryption external key material mode") {
+    // https://github.com/apache/datafusion-comet/issues/1488
+    assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != 
CometConf.SCAN_NATIVE_ICEBERG_COMPAT)
+
     import testImplicits._
 
     Seq("org.apache.parquet.crypto.keytools.PropertiesDrivenCryptoFactory").foreach {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to