This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 613297eae [KYUUBI #5195] Make Spark TPC-H connector plugin compilable on Scala 2.13
613297eae is described below

commit 613297eae0b41f3526e0d126702e5cf4fcd05f62
Author: liangbowen <[email protected]>
AuthorDate: Wed Aug 23 21:17:53 2023 +0800

    [KYUUBI #5195] Make Spark TPC-H connector plugin compilable on Scala 2.13
    
    ### _Why are the changes needed?_
    
    - Make the Spark TPC-H connector compilable on Scala 2.13 (see the sketch below)
    - Add the Spark extensions to the Scala 2.13 compilation CI test
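    
    For illustration, a minimal sketch (not part of this patch) of the Scala 2.13 collection change behind the `.toSeq` calls in this diff: in 2.13, `scala.Seq` aliases `scala.collection.immutable.Seq`, so mutable collections such as `ArrayBuffer` no longer satisfy `Seq` parameters implicitly.
    
    ```scala
    import scala.collection.mutable.ArrayBuffer
    
    object SeqCompat {
      // On Scala 2.13, Seq means immutable.Seq, so a mutable buffer
      // must be converted explicitly before being passed as a Seq.
      def describe(values: Seq[Any]): String = values.mkString(", ")
    
      def main(args: Array[String]): Unit = {
        val buffer = ArrayBuffer[Any]("a", 1, 2.0)
        // describe(buffer)              // compiles on 2.12, fails on 2.13
        println(describe(buffer.toSeq))  // compiles on both
      }
    }
    ```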
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request
    
    ### _Was this patch authored or co-authored using generative AI tooling?_
    
    Closes #5195 from bowenliang123/scala213-conn.
    
    Closes #5195
    
    ad2558a16 [liangbowen] update
    c50d29111 [liangbowen] update
    d38eb3b6c [liangbowen] adapt spark connector plugin compilable on Scala 2.13
    
    Authored-by: liangbowen <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .github/workflows/master.yml                                       | 7 ++++---
 .../org/apache/kyuubi/spark/connector/tpch/TPCHBatchScan.scala     | 2 +-
 .../scala/org/apache/kyuubi/spark/connector/tpch/TPCHTable.scala   | 2 +-
 3 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
index 3bf270cb1..0e050b4a7 100644
--- a/.github/workflows/master.yml
+++ b/.github/workflows/master.yml
@@ -191,13 +191,14 @@ jobs:
           check-latest: false
       - name: Setup Maven
         uses: ./.github/actions/setup-maven
-      - name: Build on Scala
+      - name: Build on Scala ${{ matrix.scala }}
         run: |
-          MODULES="kyuubi-server"
+          MODULES='!externals/kyuubi-flink-sql-engine'
           ./build/mvn clean install -pl ${MODULES} -am \
           -DskipTests -Pflink-provided,hive-provided,spark-provided \
           -Pjava-${{ matrix.java }} \
-          -Pscala-${{ matrix.scala }}
+          -Pscala-${{ matrix.scala }} \
+          -Pspark-3.3
 
   flink-it:
     name: Flink Test
diff --git a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHBatchScan.scala b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHBatchScan.scala
index b5bca42cc..63ff82b7a 100644
--- a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHBatchScan.scala
+++ b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHBatchScan.scala
@@ -144,7 +144,7 @@ class TPCHPartitionReader(
          case (value, dt) => throw new IllegalArgumentException(s"value: $value, type: $dt")
         }
       }
-      InternalRow.fromSeq(rowAny)
+      InternalRow.fromSeq(rowAny.toSeq)
     }
     hasNext
   }
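
For context, a hedged sketch (not part of the patch) of the fix above: `InternalRow.fromSeq` takes a `Seq[Any]`, and `rowAny` is built as a mutable buffer, so Scala 2.13 requires the explicit conversion. Assuming a simple `ArrayBuffer` in place of the reader's real buffer:

```scala
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.InternalRow

// rowAny stands in for the buffer assembled in TPCHPartitionReader;
// .toSeq yields an immutable Seq, which fromSeq accepts on Scala 2.13.
val rowAny = ArrayBuffer[Any](1L, 2.5d)
val row: InternalRow = InternalRow.fromSeq(rowAny.toSeq)
```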
diff --git a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHTable.scala b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHTable.scala
index de4bd49f2..65038d35b 100644
--- a/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHTable.scala
+++ b/extensions/spark/kyuubi-spark-connector-tpch/src/main/scala/org/apache/kyuubi/spark/connector/tpch/TPCHTable.scala
@@ -44,7 +44,7 @@ class TPCHTable(tbl: String, scale: Double, tpchConf: TPCHConf)
    StructType(
      tpchTable.asInstanceOf[TpchTable[TpchEntity]].getColumns.zipWithIndex.map { case (c, _) =>
         StructField(c.getColumnName, toSparkDataType(c.getType))
-      })
+      }.toSeq)
   }
 
   override def capabilities(): util.Set[TableCapability] =
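
Similarly, a hedged sketch (not from this patch) of the second fix: `getColumns` returns a Java list, and mapping over the converted collection yields a mutable `Buffer` on 2.13, while `StructType(...)` expects a `Seq[StructField]`. Assuming `scala.jdk.CollectionConverters` (Scala 2.13) and hypothetical column names:

```scala
import java.util.Arrays
import scala.jdk.CollectionConverters._
import org.apache.spark.sql.types.{StringType, StructField, StructType}

// javaColumns stands in for the Java list returned by getColumns;
// asScala gives a mutable Buffer, so the map result is also a Buffer.
val javaColumns = Arrays.asList("c_name", "c_address")
val fields = javaColumns.asScala.map(name => StructField(name, StringType))
val schema = StructType(fields.toSeq) // needs an immutable Seq on 2.13
```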
