This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f05403c27a8 [SPARK-45460][SQL] Replace `scala.collection.convert.ImplicitConversions` to `scala.jdk.CollectionConverters`
f05403c27a8 is described below

commit f05403c27a8fdb093ca680937bb55bbd187b9339
Author: panbingkun <[email protected]>
AuthorDate: Sun Oct 8 13:49:39 2023 -0700

    [SPARK-45460][SQL] Replace `scala.collection.convert.ImplicitConversions` to `scala.jdk.CollectionConverters`
    
    ### What changes were proposed in this pull request?
    This PR aims to replace `scala.collection.convert.ImplicitConversions` with `scala.jdk.CollectionConverters`.
    
    ### Why are the changes needed?
    Since Scala 2.13.0, `scala.collection.convert.ImplicitConversions` has been marked as deprecated, so this PR replaces all uses of `scala.collection.convert.ImplicitConversions` with `scala.jdk.CollectionConverters`.
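    
    For context, a minimal illustrative sketch (not part of the patch itself) of what this migration looks like at a call site: the deprecated implicit conversions are replaced by the explicit `.asScala` / `.asJava` decorators from `scala.jdk.CollectionConverters`, mirroring the `getFieldsList.asScala` and `addAllFields(protoFields.asJava)` changes in the diff below.
    
    ```scala
    import scala.jdk.CollectionConverters._
    
    // Before (deprecated since Scala 2.13):
    //   import scala.collection.convert.ImplicitConversions._
    //   javaList.map(_.toUpperCase)   // silent implicit Java -> Scala wrapping
    
    // After: Java <-> Scala conversions are spelled out explicitly.
    val javaList = new java.util.ArrayList[String]()
    javaList.add("a")
    javaList.add("b")
    
    val scalaSeq: Seq[String] = javaList.asScala.toSeq        // Java -> Scala
    val backToJava: java.util.List[String] = scalaSeq.asJava  // Scala -> Java
    ```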
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #43275 from panbingkun/SPARK-45460.
    
    Authored-by: panbingkun <[email protected]>
    Signed-off-by: Dongjoon Hyun <[email protected]>
---
 .../apache/spark/sql/connect/common/DataTypeProtoConverter.scala    | 6 +++---
 .../org/apache/spark/sql/connect/service/SparkConnectServer.scala   | 4 ++--
 core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala      | 4 ++--
 3 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/DataTypeProtoConverter.scala b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/DataTypeProtoConverter.scala
index bac3da25461..34b5aae21dc 100644
--- a/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/DataTypeProtoConverter.scala
+++ b/connector/connect/common/src/main/scala/org/apache/spark/sql/connect/common/DataTypeProtoConverter.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.connect.common
 
-import scala.collection.convert.ImplicitConversions._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.connect.proto
 import org.apache.spark.sql.types._
@@ -98,7 +98,7 @@ object DataTypeProtoConverter {
   }
 
   private def toCatalystStructType(t: proto.DataType.Struct): StructType = {
-    val fields = t.getFieldsList.toSeq.map { protoField =>
+    val fields = t.getFieldsList.asScala.toSeq.map { protoField =>
       val metadata = if (protoField.hasMetadata) {
         Metadata.fromJson(protoField.getMetadata)
       } else {
@@ -253,7 +253,7 @@ object DataTypeProtoConverter {
           .setStruct(
             proto.DataType.Struct
               .newBuilder()
-              .addAllFields(protoFields)
+              .addAllFields(protoFields.asJava)
               .build())
           .build()
 
diff --git a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala
index 26c1062bf34..fbaa9e0172a 100644
--- a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala
+++ b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/service/SparkConnectServer.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.connect.service
 
 import java.net.InetSocketAddress
 
-import scala.collection.convert.ImplicitConversions._
+import scala.jdk.CollectionConverters._
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.SparkSession
@@ -35,7 +35,7 @@ object SparkConnectServer extends Logging {
     try {
       try {
         SparkConnectService.start(session.sparkContext)
-        SparkConnectService.server.getListenSockets.foreach { sa =>
+        SparkConnectService.server.getListenSockets.asScala.foreach { sa =>
           val isa = sa.asInstanceOf[InetSocketAddress]
           logInfo(
             s"Spark Connect server started at: " +
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 8029275d838..00ac69b4a07 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets
 import java.nio.file.Files
 import java.util.Locale
 
-import scala.collection.convert.ImplicitConversions._
+import scala.jdk.CollectionConverters._
 import scala.util.Properties.lineSeparator
 import scala.util.matching.Regex
 
@@ -229,7 +229,7 @@ class SparkThrowableSuite extends SparkFunSuite {
       }).toSet
 
       val docsDir = getWorkspaceFilePath("docs")
-      val orphans = FileUtils.listFiles(docsDir.toFile, Array("md"), false).filter { f =>
+      val orphans = FileUtils.listFiles(docsDir.toFile, Array("md"), false).asScala.filter { f =>
         (f.getName.startsWith("sql-error-conditions-") && f.getName.endsWith("-error-class.md")) &&
           !subErrorFileNames.contains(f.getName)
       }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
