hvanhovell commented on code in PR #39361:
URL: https://github.com/apache/spark/pull/39361#discussion_r1064829476


##########
connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientSuite.scala:
##########
@@ -16,17 +16,143 @@
  */
 package org.apache.spark.sql.connect.client
 
+import java.util.concurrent.TimeUnit
+
+import io.grpc.Server
+import io.grpc.netty.NettyServerBuilder
+import io.grpc.stub.StreamObserver
+import org.scalatest.BeforeAndAfterEach
 import org.scalatest.funsuite.AnyFunSuite // scalastyle:ignore funsuite
-import org.apache.spark.connect.proto
 
-class SparkConnectClientSuite extends AnyFunSuite { // scalastyle:ignore funsuite
+import org.apache.spark.connect.proto.{AnalyzePlanRequest, AnalyzePlanResponse, SparkConnectServiceGrpc}
+
+class SparkConnectClientSuite
+    extends AnyFunSuite // scalastyle:ignore funsuite
+    with BeforeAndAfterEach {
+
+  private var client: SparkConnectClient = _
+  private var server: Server = _
+
+  private def startDummyServer(port: Int): Unit = {
+    val sb = NettyServerBuilder
+      .forPort(port)
+      .addService(new DummySparkConnectService())
+
+    server = sb.build
+    server.start()
+  }
+  override def beforeEach(): Unit = {
+    super.beforeEach()
+    client = null
+    server = null
+  }
+
+  override def afterEach(): Unit = {
+    if (server != null) {
+      server.shutdownNow()
+      assert(server.awaitTermination(5, TimeUnit.SECONDS), "server failed to shutdown")
+    }
 
-  private def createClient = {
-    new SparkConnectClient(proto.UserContext.newBuilder().build())
+    if (client != null) {
+      client.shutdown()
+    }
   }
 
   test("Placeholder test: Create SparkConnectClient") {
-    val client = SparkConnectClient.builder().userId("abc123").build()
+    client = SparkConnectClient.builder().userId("abc123").build()
     assert(client.userId == "abc123")
   }
+
+  test("Test connection") {
+    val testPort = 16000
+    startDummyServer(testPort)
+    client = SparkConnectClient.builder().port(testPort).build()
+    val request = AnalyzePlanRequest
+      .newBuilder()
+      .setClientId("abc123")
+      .build()
+
+    val response = client.analyze(request)
+    assert(response.getClientId === "abc123")
+  }
+
+  test("Test connection string") {
+    val testPort = 16000
+    startDummyServer(testPort)
+    client = SparkConnectClient.builder().connectionString("sc://localhost:16000").build()
+    val request = AnalyzePlanRequest
+      .newBuilder()
+      .setClientId("abc123")
+      .build()
+
+    val response = client.analyze(request)
+    assert(response.getClientId === "abc123")
+  }
+
+  private case class testPackURI(

Review Comment:
   Nit: class names should start with a capital letter?
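   For example, something along these lines (just a sketch of the suggested rename;
   the parameter list is left out here because it is truncated in the diff above):

       // hypothetical: UpperCamelCase name, parameters unchanged from the PR
       private case class TestPackURI(/* same parameters as testPackURI */)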



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

