This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/spark-connect-swift.git


The following commit(s) were added to refs/heads/main by this push:
     new 7fe4882  [SPARK-51858] Support `SPARK_REMOTE`
7fe4882 is described below

commit 7fe48821dedc2d04942b5d81ef114aa67336faff
Author: Dongjoon Hyun <dongj...@apache.org>
AuthorDate: Mon Apr 21 23:57:09 2025 +0900

    [SPARK-51858] Support `SPARK_REMOTE`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to support the `SPARK_REMOTE` environment variable.
    
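    As a sketch of the resolution order introduced here (this mirrors the `Builder.create()` change in the diff below and is illustrative rather than verbatim, standalone code; `sparkConf` is the builder's accumulated configuration):

        // Explicit `spark.remote` in the builder's conf wins, then the
        // `SPARK_REMOTE` environment variable, then the default endpoint.
        let remote = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ?? "sc://localhost:15002"
        let session = SparkSession(sparkConf["spark.remote"] ?? remote)
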
    ### Why are the changes needed?
    
    For feature parity with the other Spark Connect clients, which already honor the `SPARK_REMOTE` environment variable.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No behavior change. This is an additional way to set the Spark Connect remote string.
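
    For example, with `SPARK_REMOTE` exported in the environment (the endpoint below is hypothetical), the builder picks it up without any explicit `spark.remote` setting:

        // Assuming SPARK_REMOTE=sc://example-host:15002 is set before launch.
        let spark = try await SparkSession.builder.getOrCreate()
        await spark.stop()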
    
    ### How was this patch tested?
    
    Pass the CIs with the revised test pipeline.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #79 from dongjoon-hyun/SPARK-51858.
    
    Authored-by: Dongjoon Hyun <dongj...@apache.org>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 .github/workflows/build_and_test.yml                  |  4 +++-
 Sources/SparkConnect/SparkSession.swift               |  3 ++-
 Tests/SparkConnectTests/BuilderTests.swift            |  8 ++++++--
 Tests/SparkConnectTests/RuntimeConfTests.swift        | 10 ++++++----
 Tests/SparkConnectTests/SparkConnectClientTests.swift | 14 ++++++++------
 5 files changed, 25 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index f0d64b2..a785839 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -67,13 +67,15 @@ jobs:
 
   integration-test-linux:
     runs-on: ubuntu-latest
+    env:
+      SPARK_REMOTE: "sc://localhost:15003"
     services:
       spark:
         image: apache/spark:4.0.0-preview2
         env:
           SPARK_NO_DAEMONIZE: 1
         ports:
-          - 15002:15002
+          - 15003:15002
         options: --entrypoint /opt/spark/sbin/start-connect-server.sh
     steps:
     - uses: actions/checkout@v4
diff --git a/Sources/SparkConnect/SparkSession.swift b/Sources/SparkConnect/SparkSession.swift
index 480f55f..b57ec76 100644
--- a/Sources/SparkConnect/SparkSession.swift
+++ b/Sources/SparkConnect/SparkSession.swift
@@ -221,7 +221,8 @@ public actor SparkSession {
     /// Create a new ``SparkSession``. If `spark.remote` is not given, `sc://localhost:15002` is used.
     /// - Returns: A newly created `SparkSession`.
     func create() async throws -> SparkSession {
-      let session = SparkSession(sparkConf["spark.remote"] ?? "sc://localhost:15002")
+      let remote = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ?? "sc://localhost:15002"
+      let session = SparkSession(sparkConf["spark.remote"] ?? remote)
       let response = try await session.client.connect(session.sessionID)
       await session.setVersion(response.sparkVersion.version)
       let isSuccess = try await session.client.setConf(map: sparkConf)
diff --git a/Tests/SparkConnectTests/BuilderTests.swift b/Tests/SparkConnectTests/BuilderTests.swift
index 91fee76..de2b31d 100644
--- a/Tests/SparkConnectTests/BuilderTests.swift
+++ b/Tests/SparkConnectTests/BuilderTests.swift
@@ -17,6 +17,7 @@
 // under the License.
 //
 
+import Foundation
 import Testing
 
 @testable import SparkConnect
@@ -24,12 +25,15 @@ import Testing
 /// A test suite for `SparkSession.Builder`
 @Suite(.serialized)
 struct BuilderTests {
+  let TEST_REMOTE = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ?? "sc://localhost:15002"
+
   @Test
   func builderDefault() async throws {
+    let url = URL(string: self.TEST_REMOTE)!
     let spark = try await SparkSession.builder.getOrCreate()
     #expect(await spark.client.clientType == "swift")
-    #expect(await spark.client.url.host() == "localhost")
-    #expect(await spark.client.url.port == 15002)
+    #expect(await spark.client.url.host() == url.host())
+    #expect(await spark.client.url.port == url.port)
     await spark.stop()
   }
 
diff --git a/Tests/SparkConnectTests/RuntimeConfTests.swift b/Tests/SparkConnectTests/RuntimeConfTests.swift
index 4452085..cedab9d 100644
--- a/Tests/SparkConnectTests/RuntimeConfTests.swift
+++ b/Tests/SparkConnectTests/RuntimeConfTests.swift
@@ -25,9 +25,11 @@ import Testing
 /// A test suite for `RuntimeConf`
 @Suite(.serialized)
 struct RuntimeConfTests {
+  let TEST_REMOTE = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ?? "sc://localhost"
+
   @Test
   func get() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     _ = try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
 
@@ -42,7 +44,7 @@ struct RuntimeConfTests {
 
   @Test
   func set() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     _ = try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
     try await conf.set("spark.test.key1", "value1")
@@ -52,7 +54,7 @@ struct RuntimeConfTests {
 
   @Test
   func reset() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     _ = try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
 
@@ -73,7 +75,7 @@ struct RuntimeConfTests {
 
   @Test
   func getAll() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     _ = try await client.connect(UUID().uuidString)
     let conf = RuntimeConf(client)
     let map = try await conf.getAll()
diff --git a/Tests/SparkConnectTests/SparkConnectClientTests.swift b/Tests/SparkConnectTests/SparkConnectClientTests.swift
index a1ef083..a9c1c22 100644
--- a/Tests/SparkConnectTests/SparkConnectClientTests.swift
+++ b/Tests/SparkConnectTests/SparkConnectClientTests.swift
@@ -25,9 +25,11 @@ import Testing
 /// A test suite for `SparkConnectClient`
 @Suite(.serialized)
 struct SparkConnectClientTests {
+  let TEST_REMOTE = ProcessInfo.processInfo.environment["SPARK_REMOTE"] ?? "sc://localhost"
+
   @Test
   func createAndStop() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     await client.stop()
   }
 
@@ -44,7 +46,7 @@ struct SparkConnectClientTests {
 
   @Test
   func connectWithInvalidUUID() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     try await #require(throws: SparkConnectError.InvalidSessionIDException) {
       let _ = try await client.connect("not-a-uuid-format")
     }
@@ -53,14 +55,14 @@ struct SparkConnectClientTests {
 
   @Test
   func connect() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     let _ = try await client.connect(UUID().uuidString)
     await client.stop()
   }
 
   @Test
   func tags() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     let _ = try await client.connect(UUID().uuidString)
     let plan = await client.getPlanRange(0, 1, 1)
 
@@ -76,7 +78,7 @@ struct SparkConnectClientTests {
 
   @Test
   func ddlParse() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     let _ = try await client.connect(UUID().uuidString)
     #expect(try await client.ddlParse("a int").simpleString == "struct<a:int>")
     await client.stop()
@@ -85,7 +87,7 @@ struct SparkConnectClientTests {
 #if !os(Linux) // TODO: Enable this with the official Spark 4 docker image
   @Test
   func jsonToDdl() async throws {
-    let client = SparkConnectClient(remote: "sc://localhost")
+    let client = SparkConnectClient(remote: TEST_REMOTE)
     let _ = try await client.connect(UUID().uuidString)
     let json =
       #"{"type":"struct","fields":[{"name":"id","type":"long","nullable":false,"metadata":{}}]}"#

