dongjoon-hyun commented on code in PR #39361:
URL: https://github.com/apache/spark/pull/39361#discussion_r1069027035


##########
connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala:
##########
@@ -16,17 +16,151 @@
  */
 package org.apache.spark.sql.connect.client
 
+import java.util.concurrent.TimeUnit
+
+import io.grpc.Server
+import io.grpc.netty.NettyServerBuilder
+import io.grpc.stub.StreamObserver
+import org.scalatest.BeforeAndAfterEach
 import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
-import org.apache.spark.connect.proto
 
-class SparkConnectClientSuite extends AnyFunSuite { // scalastyle:ignore funsuite
+import org.apache.spark.connect.proto.{AnalyzePlanRequest, AnalyzePlanResponse, SparkConnectServiceGrpc}
+import org.apache.spark.sql.connect.common.config.ConnectCommon
+
+class SparkConnectClientSuite
+    extends AnyFunSuite // scalastyle:ignore funsuite
+    with BeforeAndAfterEach {
+
+  private var client: SparkConnectClient = _
+  private var server: Server = _
+
+  private def startDummyServer(port: Int): Unit = {
+    val sb = NettyServerBuilder
+      .forPort(port)
+      .addService(new DummySparkConnectService())
+
+    server = sb.build
+    server.start()
+  }
+  override def beforeEach(): Unit = {
+    super.beforeEach()
+    client = null
+    server = null
+  }
+
+  override def afterEach(): Unit = {
+    if (server != null) {
+      server.shutdownNow()
+      assert(server.awaitTermination(5, TimeUnit.SECONDS), "server failed to shutdown")
+    }
 
-  private def createClient = {
-    new SparkConnectClient(proto.UserContext.newBuilder().build())
+    if (client != null) {
+      client.shutdown()
+    }
   }
 
   test("Placeholder test: Create SparkConnectClient") {
-    val client = SparkConnectClient.builder().userId("abc123").build()
+    client = SparkConnectClient.builder().userId("abc123").build()
     assert(client.userId == "abc123")
   }
+
+  private def testClientConnection(
+      client: SparkConnectClient,
+      serverPort: Int = ConnectCommon.CONNECT_GRPC_BINDING_PORT): Unit = {
+    startDummyServer(serverPort)
+    val request = AnalyzePlanRequest
+      .newBuilder()
+      .setClientId("abc123")
+      .build()
+
+    val response = client.analyze(request)
+    assert(response.getClientId === "abc123")
+  }
+
+  test("Test connection") {
+    val testPort = 16000
+    client = SparkConnectClient.builder().port(testPort).build()
+    testClientConnection(client, testPort)
+  }
+
+  test("Test connection string") {
+    val testPort = 16000
+    client = SparkConnectClient.builder().connectionString("sc://localhost:16000").build()
+    testClientConnection(client, testPort)
+  }
+
+  private case class TestPackURI(
+      connectionString: String,
+      isCorrect: Boolean,
+      extraChecks: SparkConnectClient => Unit = _ => {})
+
+  private val URIs = Seq[TestPackURI](
+    TestPackURI("sc://host", isCorrect = true),
+    TestPackURI("sc://localhost/", isCorrect = true, client => testClientConnection(client)),
+    TestPackURI(
+      "sc://localhost:123/",

Review Comment:
   This test seems to fail in the GitHub Actions environment if we enable this test suite.
   - https://github.com/apache/spark/pull/39549
   
   ```
   [info] - Check URI: sc://localhost:123/, isCorrect: true *** FAILED *** (6 milliseconds)
   [info]   java.io.IOException: Failed to bind to address 0.0.0.0/0.0.0.0:123
   ...
   Cause: java.net.SocketException: Permission denied
   [info]   at sun.nio.ch.Net.bind0(Native Method)
   ```
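   
   For context: port 123 is a privileged port (below 1024 on Linux), so binding to it requires root, which the GitHub Actions runner does not grant; hence the `Permission denied` on `bind0`. A common workaround in test suites, sketched below rather than taken from this PR, is to ask the OS for a free unprivileged port via `java.net.ServerSocket(0)`. The helper name `freePort` is hypothetical.
   
   ```scala
   import java.net.ServerSocket
   
   // Hypothetical helper (not part of this PR): bind a ServerSocket to port 0
   // so the OS assigns a free ephemeral port, read that port number, then
   // release the socket so the dummy server can bind to it. A small race
   // window remains between close() and the server's bind, which is
   // generally acceptable in tests.
   def freePort(): Int = {
     val socket = new ServerSocket(0)
     try socket.getLocalPort
     finally socket.close()
   }
   ```
   
   The test could then pass `freePort()` to `testClientConnection` (and interpolate it into the `TestPackURI` connection strings) instead of hard-coding ports such as 123 or 16000.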


