This is an automated email from the ASF dual-hosted git repository.

comphead pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git


The following commit(s) were added to refs/heads/main by this push:
     new fbca16346 chore: fix `pr_build*.yml` (#2434)
fbca16346 is described below

commit fbca16346f9a63008a3a97332db70018cf1ae30c
Author: Oleks V <[email protected]>
AuthorDate: Sun Sep 21 15:21:25 2025 -0700

    chore: fix `pr_build*.yml` (#2434)
    
    * chore: fix pr_build*.yml
    
    * clippy
    
    * chore: fix pr_build*.yml
---
 .github/workflows/pr_build_linux.yml                       | 4 ++--
 .github/workflows/pr_build_macos.yml                       | 4 ++--
 native/spark-expr/src/conversion_funcs/cast.rs             | 4 ++--
 spark/src/main/scala/org/apache/comet/serde/literals.scala | 4 +++-
 4 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/pr_build_linux.yml b/.github/workflows/pr_build_linux.yml
index fe7df906d..9b918ad8b 100644
--- a/.github/workflows/pr_build_linux.yml
+++ b/.github/workflows/pr_build_linux.yml
@@ -151,7 +151,7 @@ jobs:
               org.apache.comet.objectstore.NativeConfigSuite
           - name: "sql"
             value: |
-              ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}}
+              org.apache.spark.sql.CometToPrettyStringSuite
       fail-fast: false
     name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
     runs-on: ${{ matrix.os }}
@@ -171,7 +171,7 @@ jobs:
         uses: ./.github/actions/java-test
         with:
           artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
-          suites: ${{ matrix.suite.value }}
+          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
           maven_opts: ${{ matrix.profile.maven_opts }}
           scan_impl: ${{ matrix.profile.scan_impl }}
           upload-test-reports: true
\ No newline at end of file
diff --git a/.github/workflows/pr_build_macos.yml b/.github/workflows/pr_build_macos.yml
index 1abe644f5..fb6a8295b 100644
--- a/.github/workflows/pr_build_macos.yml
+++ b/.github/workflows/pr_build_macos.yml
@@ -116,7 +116,7 @@ jobs:
               org.apache.comet.objectstore.NativeConfigSuite
           - name: "sql"
             value: |
-              ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}}
+              org.apache.spark.sql.CometToPrettyStringSuite
       fail-fast: false
     name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
     runs-on: ${{ matrix.os }}
@@ -133,5 +133,5 @@ jobs:
         uses: ./.github/actions/java-test
         with:
           artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
-          suites: ${{ matrix.suite.value }}
+          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
           maven_opts: ${{ matrix.profile.maven_opts }}
diff --git a/native/spark-expr/src/conversion_funcs/cast.rs b/native/spark-expr/src/conversion_funcs/cast.rs
index 0c7b437a5..af997ccf8 100644
--- a/native/spark-expr/src/conversion_funcs/cast.rs
+++ b/native/spark-expr/src/conversion_funcs/cast.rs
@@ -248,7 +248,7 @@ fn can_cast_from_string(to_type: &DataType, options: &SparkCastOptions) -> bool
     }
 }
 
-fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool {
+fn can_cast_to_string(from_type: &DataType, _options: &SparkCastOptions) -> bool {
     use DataType::*;
     match from_type {
         Boolean | Int8 | Int16 | Int32 | Int64 | Date32 | Date64 | Timestamp(_, _) => true,
@@ -267,7 +267,7 @@ fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool
         Binary => true,
         Struct(fields) => fields
             .iter()
-            .all(|f| can_cast_to_string(f.data_type(), options)),
+            .all(|f| can_cast_to_string(f.data_type(), _options)),
         _ => false,
     }
 }
diff --git a/spark/src/main/scala/org/apache/comet/serde/literals.scala b/spark/src/main/scala/org/apache/comet/serde/literals.scala
index c18755e07..312f12a4c 100644
--- a/spark/src/main/scala/org/apache/comet/serde/literals.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/literals.scala
@@ -132,7 +132,9 @@ object CometLiteral extends CometExpressionSerde[Literal] with Logging {
       case ByteType =>
         array.foreach(v => {
           val casted = v.asInstanceOf[lang.Byte]
-          listLiteralBuilder.addByteValues(casted.intValue())
+          listLiteralBuilder.addByteValues(
+            if (casted != null) casted.intValue()
+            else null.asInstanceOf[Integer])
           listLiteralBuilder.addNullMask(casted != null)
         })
       case ShortType =>
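
Note on the literals.scala hunk above: calling intValue() on a null java.lang.Byte throws a NullPointerException, so the new code guards the unboxing and records nullability separately via addNullMask. Below is a minimal, standalone Scala sketch of that guarded-unboxing pattern; it is illustrative only (NullSafeByteDemo and toIntOrNull are made-up names, not part of the Comet serde code).

    import java.lang

    object NullSafeByteDemo {
      // Guarded unboxing: return a boxed Integer, or null when the input is null,
      // instead of letting intValue() throw a NullPointerException on null.
      def toIntOrNull(v: Any): Integer = {
        val casted = v.asInstanceOf[lang.Byte]
        if (casted != null) casted.intValue() else null.asInstanceOf[Integer]
      }

      def main(args: Array[String]): Unit = {
        val values: Seq[Any] = Seq(lang.Byte.valueOf(7.toByte), null)
        values.foreach { v =>
          // Mirrors the value/null-mask pair written by the hunk above.
          println(s"value=${toIntOrNull(v)}, nullMask=${v != null}")
        }
      }
    }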

