This is an automated email from the ASF dual-hosted git repository.

hvanhovell pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new 594c8fe2e0a [SPARK-42884][CONNECT] Add Ammonite REPL integration
594c8fe2e0a is described below

commit 594c8fe2e0a15e4b2462afcdfcc310c8a21d55e0
Author: Herman van Hovell <her...@databricks.com>
AuthorDate: Fri Mar 24 13:15:50 2023 -0400

    [SPARK-42884][CONNECT] Add Ammonite REPL integration
    
    ### What changes were proposed in this pull request?
    This PR adds Ammonite REPL integration for Spark Connect. This has a couple of benefits:
    - It makes it a lot less cumbersome for users to start a Spark Connect REPL. You don't have to add custom scripts, and you can use `coursier` to launch a fully functional REPL for you (see the sketch after this list).
    - It adds the REPL integration to the actual build. This makes it easier to validate that the code we add actually works.
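
    For illustration only (assuming a Spark Connect server is reachable at the default endpoint), a session in the new REPL might look like the following; the queries are just placeholders:

        // `spark`, `sql` and the functions._ import are pre-bound by the REPL's predef code.
        spark.sql("select 1 as id").show()
        sql("select 'hello' as msg").collect()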
    
    ### Why are the changes needed?
    A REPL is arguably the first entry point for a lot of users.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, it adds REPL integration.
    
    ### How was this patch tested?
    Added tests for the command line parsing. Manually tested the REPL.
    
    Closes #40515 from hvanhovell/SPARK-42884.
    
    Authored-by: Herman van Hovell <her...@databricks.com>
    Signed-off-by: Herman van Hovell <her...@databricks.com>
    (cherry picked from commit acf3065cc9e91e73db335a050289153582c45ce5)
    Signed-off-by: Herman van Hovell <her...@databricks.com>
---
 connector/connect/bin/spark-connect-scala-client   |   5 +-
 .../connect/bin/spark-connect-scala-client.sc      |  34 ------
 connector/connect/client/jvm/pom.xml               |  12 ++
 .../apache/spark/sql/application/ConnectRepl.scala |  88 ++++++++++++++
 .../sql/connect/client/SparkConnectClient.scala    |  79 ++++++++++---
 .../connect/client/SparkConnectClientParser.scala  |  93 +++++++++++++++
 .../SparkConnectClientBuilderParseTestSuite.scala  | 131 +++++++++++++++++++++
 pom.xml                                            |   2 +
 8 files changed, 387 insertions(+), 57 deletions(-)

diff --git a/connector/connect/bin/spark-connect-scala-client b/connector/connect/bin/spark-connect-scala-client
index 8c5e687ef24..e7a15c56d7c 100755
--- a/connector/connect/bin/spark-connect-scala-client
+++ b/connector/connect/bin/spark-connect-scala-client
@@ -37,7 +37,6 @@ export SPARK_HOME=$FWDIR
 # Determine the Scala version used in Spark
 SCALA_BINARY_VER=`grep "scala.binary.version" "${SPARK_HOME}/pom.xml" | head -n1 | awk -F '[<>]' '{print $3}'`
 SCALA_VER=`grep "scala.version" "${SPARK_HOME}/pom.xml" | grep ${SCALA_BINARY_VER} | head -n1 | awk -F '[<>]' '{print $3}'`
-SCALA_BIN="${SPARK_HOME}/build/scala-${SCALA_VER}/bin/scala"
 SCALA_ARG="-Pscala-${SCALA_BINARY_VER}"
 
 # Build the jars needed for spark connect JVM client
@@ -46,6 +45,4 @@ build/sbt "${SCALA_ARG}" "sql/package;connect-client-jvm/assembly"
 CONNECT_CLASSPATH="$(build/sbt "${SCALA_ARG}" -DcopyDependencies=false "export 
connect-client-jvm/fullClasspath" | grep jar | tail -n1)"
 SQL_CLASSPATH="$(build/sbt "${SCALA_ARG}" -DcopyDependencies=false "export 
sql/fullClasspath" | grep jar | tail -n1)"
 
-INIT_SCRIPT="${SPARK_HOME}"/connector/connect/bin/spark-connect-scala-client.sc
-
-exec "${SCALA_BIN}" -cp "$CONNECT_CLASSPATH:$SQL_CLASSPATH" -i $INIT_SCRIPT
+exec java -cp "$CONNECT_CLASSPATH:$SQL_CLASSPATH" org.apache.spark.sql.application.ConnectRepl "$@"
\ No newline at end of file
diff --git a/connector/connect/bin/spark-connect-scala-client.sc b/connector/connect/bin/spark-connect-scala-client.sc
deleted file mode 100644
index 9cb4f92417d..00000000000
--- a/connector/connect/bin/spark-connect-scala-client.sc
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import org.apache.spark.sql.functions._
-import org.apache.spark.sql.SparkSession
-
-val conStr = if (sys.env.contains("SPARK_REMOTE")) sys.env("SPARK_REMOTE") else ""
-val sessionBuilder = SparkSession.builder()
-val spark = if (conStr.isEmpty) sessionBuilder.build() else sessionBuilder.remote(conStr).build()
-import spark.implicits._
-import spark.sql
-println("Spark session available as 'spark'.")
-println(
-  """
-    |   _____                  __      ______                            __
-    |  / ___/____  ____ ______/ /__   / ____/___  ____  ____  ___  _____/ /_
-    |  \__ \/ __ \/ __ `/ ___/ //_/  / /   / __ \/ __ \/ __ \/ _ \/ ___/ __/
-    | ___/ / /_/ / /_/ / /  / ,<    / /___/ /_/ / / / / / / /  __/ /__/ /_
-    |/____/ .___/\__,_/_/  /_/|_|   \____/\____/_/ /_/_/ /_/\___/\___/\__/
-    |    /_/
-    |""".stripMargin)
\ No newline at end of file
diff --git a/connector/connect/client/jvm/pom.xml b/connector/connect/client/jvm/pom.xml
index 9fa818f7939..b8b0e8e459a 100644
--- a/connector/connect/client/jvm/pom.xml
+++ b/connector/connect/client/jvm/pom.xml
@@ -95,6 +95,18 @@
       <artifactId>netty-transport-native-unix-common</artifactId>
       <version>${netty.version}</version>
     </dependency>
+    <dependency>
+      <groupId>com.lihaoyi</groupId>
+      <artifactId>ammonite_${scala.version}</artifactId>
+      <version>${ammonite.version}</version>
+      <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.scala-lang.modules</groupId>
+          <artifactId>scala-xml_${scala.binary.version}</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-connect-common_${scala.binary.version}</artifactId>
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/application/ConnectRepl.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/application/ConnectRepl.scala
new file mode 100644
index 00000000000..ec31697ee59
--- /dev/null
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/application/ConnectRepl.scala
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.application
+
+import scala.util.control.NonFatal
+
+import ammonite.compiler.CodeClassWrapper
+import ammonite.util.Bind
+
+import org.apache.spark.annotation.DeveloperApi
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.connect.client.{SparkConnectClient, SparkConnectClientParser}
+
+/**
+ * REPL for spark connect.
+ */
+@DeveloperApi
+object ConnectRepl {
+  private val name = "Spark Connect REPL"
+
+  private val splash =
+    """
+      |Spark session available as 'spark'.
+      |   _____                  __      ______                            __
+      |  / ___/____  ____ ______/ /__   / ____/___  ____  ____  ___  _____/ /_
+      |  \__ \/ __ \/ __ `/ ___/ //_/  / /   / __ \/ __ \/ __ \/ _ \/ ___/ __/
+      | ___/ / /_/ / /_/ / /  / ,<    / /___/ /_/ / / / / / / /  __/ /__/ /_
+      |/____/ .___/\__,_/_/  /_/|_|   \____/\____/_/ /_/_/ /_/\___/\___/\__/
+      |    /_/
+      |""".stripMargin
+
+  def main(args: Array[String]): Unit = {
+    // Build the client.
+    val client =
+      try {
+        SparkConnectClient
+          .builder()
+          .loadFromEnvironment()
+          .userAgent(name)
+          .parse(args)
+          .build()
+      } catch {
+        case NonFatal(e) =>
+          // scalastyle:off println
+          println(s"""
+             |$name
+             |${e.getMessage}
+             |${SparkConnectClientParser.usage()}
+             |""".stripMargin)
+          // scalastyle:on println
+          sys.exit(1)
+      }
+
+    // Build the session.
+    val spark = SparkSession.builder().client(client).build()
+
+    // Add the proper imports.
+    val imports =
+      """
+        |import org.apache.spark.sql.functions._
+        |import spark.implicits._
+        |import spark.sql
+        |""".stripMargin
+
+    // Please note that we make ammonite generate classes instead of objects.
+    // Classes tend to have superior serialization behavior when using UDFs.
+    val main = ammonite.Main(
+      welcomeBanner = Option(splash),
+      predefCode = imports,
+      replCodeWrapper = CodeClassWrapper,
+      scriptCodeWrapper = CodeClassWrapper)
+    main.run(new Bind("spark", spark))
+  }
+}
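
For context, here is a minimal sketch (not part of this diff) of how the same client can be constructed programmatically from code inside org.apache.spark.sql, using the client Builder API shown above; the endpoint and user agent below are illustrative placeholders:

    import org.apache.spark.sql.connect.client.SparkConnectClient

    // SPARK_REMOTE, when set, could be picked up via loadFromEnvironment() instead of
    // the hard-coded connection string used here.
    val client = SparkConnectClient
      .builder()
      .connectionString("sc://localhost:15002")
      .userAgent("my-app")
      .build()
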
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
index a508f3c067f..fd9ced6eb62 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClient.scala
@@ -209,7 +209,9 @@ private[sql] class SparkConnectClient(
   }
 }
 
-private[sql] object SparkConnectClient {
+object SparkConnectClient {
+
+  private val SPARK_REMOTE: String = "SPARK_REMOTE"
 
   private val DEFAULT_USER_AGENT: String = "_SPARK_CONNECT_SCALA"
 
@@ -221,7 +223,9 @@ private[sql] object SparkConnectClient {
       "Either remove 'token' or set 'use_ssl=true'"
 
   // for internal tests
-  def apply(userContext: UserContext, builder: ManagedChannelBuilder[_]): SparkConnectClient =
+  private[sql] def apply(
+      userContext: UserContext,
+      builder: ManagedChannelBuilder[_]): SparkConnectClient =
     new SparkConnectClient(userContext, builder, DEFAULT_USER_AGENT)
 
   def builder(): Builder = new Builder()
@@ -232,38 +236,49 @@ private[sql] object SparkConnectClient {
    */
   class Builder() {
     private val userContextBuilder = proto.UserContext.newBuilder()
-    private var userAgent: Option[String] = None
+    private var _userAgent: String = DEFAULT_USER_AGENT
 
-    private var host: String = "localhost"
-    private var port: Int = ConnectCommon.CONNECT_GRPC_BINDING_PORT
+    private var _host: String = "localhost"
+    private var _port: Int = ConnectCommon.CONNECT_GRPC_BINDING_PORT
 
-    private var token: Option[String] = None
+    private var _token: Option[String] = None
     // If no value specified for isSslEnabled, default to false
     private var isSslEnabled: Option[Boolean] = None
 
     private var metadata: Map[String, String] = Map.empty
 
     def userId(id: String): Builder = {
+      // TODO this is not an optional field!
+      require(id != null && id.nonEmpty)
       userContextBuilder.setUserId(id)
       this
     }
 
+    def userId: Option[String] = Option(userContextBuilder.getUserId).filter(_.nonEmpty)
+
     def userName(name: String): Builder = {
+      require(name != null && name.nonEmpty)
       userContextBuilder.setUserName(name)
       this
     }
 
+    def userName: Option[String] = Option(userContextBuilder.getUserName).filter(_.nonEmpty)
+
     def host(inputHost: String): Builder = {
       require(inputHost != null)
-      host = inputHost
+      _host = inputHost
       this
     }
 
+    def host: String = _host
+
     def port(inputPort: Int): Builder = {
-      port = inputPort
+      _port = inputPort
       this
     }
 
+    def port: Int = _port
+
     /**
      * Setting the token implicitly sets the use_ssl=true. All the following examples yield the
      * same results:
@@ -283,7 +298,7 @@ private[sql] object SparkConnectClient {
      */
     def token(inputToken: String): Builder = {
       require(inputToken != null && inputToken.nonEmpty)
-      token = Some(inputToken)
+      _token = Some(inputToken)
       // Only set the isSSlEnabled if it is not yet set
       isSslEnabled match {
         case None => isSslEnabled = Some(true)
@@ -294,6 +309,8 @@ private[sql] object SparkConnectClient {
       this
     }
 
+    def token: Option[String] = _token
+
     def enableSsl(): Builder = {
       isSslEnabled = Some(true)
       this
@@ -306,11 +323,13 @@ private[sql] object SparkConnectClient {
      *   this builder.
      */
     def disableSsl(): Builder = {
-      require(token.isEmpty, AUTH_TOKEN_ON_INSECURE_CONN_ERROR_MSG)
+      require(_token.isEmpty, AUTH_TOKEN_ON_INSECURE_CONN_ERROR_MSG)
       isSslEnabled = Some(false)
       this
     }
 
+    def sslEnabled: Boolean = isSslEnabled.contains(true)
+
     private object URIParams {
       val PARAM_USER_ID = "user_id"
       val PARAM_USE_SSL = "use_ssl"
@@ -343,10 +362,19 @@ private[sql] object SparkConnectClient {
 
     def userAgent(value: String): Builder = {
       require(value != null)
-      userAgent = Some(value)
+      _userAgent = value
+      this
+    }
+
+    def userAgent: String = _userAgent
+
+    def option(key: String, value: String): Builder = {
+      metadata += ((key, value))
       this
     }
 
+    def options: Map[String, String] = metadata
+
     private def parseURIParams(uri: URI): Unit = {
       val params = uri.getPath.split(';').drop(1).filter(_ != "")
       params.foreach { kv =>
@@ -370,6 +398,14 @@ private[sql] object SparkConnectClient {
       }
     }
 
+    /**
+     * Configure the builder using the SPARK_REMOTE environment variable.
+     */
+    def loadFromEnvironment(): Builder = {
+      sys.env.get("SPARK_REMOTE").foreach(connectionString)
+      this
+    }
+
     /**
      * Creates the channel with a target connection string, per the documentation of Spark
      * Connect.
@@ -380,19 +416,27 @@ private[sql] object SparkConnectClient {
       val uri = new URI(connectionString)
       verifyURI(uri)
       parseURIParams(uri)
-      host = uri.getHost
+      _host = uri.getHost
       val inputPort = uri.getPort
       if (inputPort != -1) {
-        port = inputPort
+        _port = inputPort
       }
       this
     }
 
+    /**
+     * Configure the builder with the given CLI arguments.
+     */
+    def parse(args: Array[String]): Builder = {
+      SparkConnectClientParser.parse(args.toList, this)
+      this
+    }
+
     def build(): SparkConnectClient = {
       val creds = isSslEnabled match {
         case Some(false) | None => InsecureChannelCredentials.create()
         case Some(true) =>
-          token match {
+          _token match {
             case Some(t) =>
               // With access token added in the http header.
               CompositeChannelCredentials.create(
@@ -403,15 +447,12 @@ private[sql] object SparkConnectClient {
           }
       }
 
-      val channelBuilder = Grpc.newChannelBuilderForAddress(host, port, creds)
+      val channelBuilder = Grpc.newChannelBuilderForAddress(_host, _port, creds)
       if (metadata.nonEmpty) {
         channelBuilder.intercept(new MetadataHeaderClientInterceptor(metadata))
       }
       channelBuilder.maxInboundMessageSize(ConnectCommon.CONNECT_GRPC_MAX_MESSAGE_SIZE)
-      new SparkConnectClient(
-        userContextBuilder.build(),
-        channelBuilder,
-        userAgent.getOrElse(DEFAULT_USER_AGENT))
+      new SparkConnectClient(userContextBuilder.build(), channelBuilder, _userAgent)
     }
   }
 
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClientParser.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClientParser.scala
new file mode 100644
index 00000000000..dda769dc2ad
--- /dev/null
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/connect/client/SparkConnectClientParser.scala
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.connect.client
+
+import scala.annotation.tailrec
+
+/**
+ * Parser that takes an array of (CLI) arguments and configures a [[SparkConnectClient]] with
+ * them.
+ */
+private[sql] object SparkConnectClientParser {
+
+  /**
+   * @return
+   *   usage string.
+   */
+  def usage(): String =
+    s"""
+       |Options:
+       |   --remote REMOTE          URI of the Spark Connect Server to connect to.
+       |   --host HOST              Host where the Spark Connect Server is running.
+       |   --port PORT              Port where the Spark Connect Server is running.
+       |   --use_ssl                Connect to the server using SSL.
+       |   --token TOKEN            Token to use for authentication.
+       |   --user_id USER_ID        Id of the user connecting.
+       |   --user_name USER_NAME    Name of the user connecting.
+       |   --option KEY=VALUE       Key-value pair that is used to further configure the session.
+     """.stripMargin
+
+  /**
+   * Parse the command line and configure the builder.
+   */
+  @tailrec
+  def parse(args: List[String], builder: SparkConnectClient.Builder): Unit = {
+    args match {
+      case Nil => ()
+      case "--remote" :: tail =>
+        val (value, remainder) = extract("--remote", tail)
+        parse(remainder, builder.connectionString(value))
+      case "--host" :: tail =>
+        val (value, remainder) = extract("--host", tail)
+        parse(remainder, builder.host(value))
+      case "--port" :: tail =>
+        val (value, remainder) = extract("--port", tail)
+        parse(remainder, builder.port(value.toInt))
+      case "--token" :: tail =>
+        val (value, remainder) = extract("--token", tail)
+        parse(remainder, builder.token(value))
+      case "--use_ssl" :: tail =>
+        parse(tail, builder.enableSsl())
+      case "--user_id" :: tail =>
+        val (value, remainder) = extract("--user_id", tail)
+        parse(remainder, builder.userId(value))
+      case "--user_name" :: tail =>
+        val (value, remainder) = extract("--user_name", tail)
+        parse(remainder, builder.userName(value))
+      case "--user_agent" :: tail =>
+        val (value, remainder) = extract("--user_agent", tail)
+        parse(remainder, builder.userAgent(value))
+      case "--option" :: tail =>
+        if (tail.isEmpty) {
+          throw new IllegalArgumentException("--option requires a key-value pair")
+        }
+        val Array(key, value, rest @ _*) = tail.head.split('=')
+        if (rest.nonEmpty) {
+          throw new IllegalArgumentException(
+            s"--option should contain key=value, found ${tail.head} instead")
+        }
+        parse(tail.tail, builder.option(key, value))
+      case unsupported :: _ =>
+        throw new IllegalArgumentException(s"$unsupported is an unsupported argument.")
+    }
+  }
+
+  private def extract(name: String, args: List[String]): (String, List[String]) = {
+    require(args.nonEmpty, s"$name option requires a value")
+    (args.head, args.tail)
+  }
+}
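
As a hedged illustration (not part of this diff), the parser above maps a command line onto Builder state; the flag values below are placeholders:

    // parse(Array(...)) is the Builder shorthand added in this commit; it delegates to
    // SparkConnectClientParser.parse.
    val builder = SparkConnectClient
      .builder()
      .parse(Array("--host", "localhost", "--port", "15002", "--use_ssl", "--option", "region=test"))
    // After parsing: builder.host == "localhost", builder.port == 15002,
    // builder.sslEnabled is true, and builder.options == Map("region" -> "test").
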
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientBuilderParseTestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientBuilderParseTestSuite.scala
new file mode 100644
index 00000000000..2c6886d0386
--- /dev/null
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/SparkConnectClientBuilderParseTestSuite.scala
@@ -0,0 +1,131 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.connect.client
+
+import org.apache.spark.sql.connect.client.util.ConnectFunSuite
+
+/**
+ * Test suite for [[SparkConnectClient.Builder]] parsing and configuration.
+ */
+class SparkConnectClientBuilderParseTestSuite extends ConnectFunSuite {
+  private def build(args: String*): SparkConnectClient.Builder = {
+    SparkConnectClient.builder().parse(args.toArray)
+  }
+
+  private def argumentTest(
+      name: String,
+      value: String,
+      extractor: SparkConnectClient.Builder => String): Unit = {
+    test("Argument - " + name) {
+      val builder = build("--" + name, value)
+      assert(value === extractor(builder))
+      val e = intercept[IllegalArgumentException] {
+        build("--" + name)
+      }
+      assert(e.getMessage.contains("option requires a value"))
+    }
+  }
+
+  argumentTest("host", "www.apache.org", _.host)
+  argumentTest("port", "1506", _.port.toString)
+  argumentTest("token", "azbycxdwev1234567890", _.token.get)
+  argumentTest("user_id", "U1238", _.userId.get)
+  argumentTest("user_name", "alice", _.userName.get)
+  argumentTest("user_agent", "MY APP", _.userAgent)
+
+  test("Argument - remote") {
+    val builder =
+      build("--remote", 
"sc://srv.apache.org/;user_id=x127;user_name=Q;token=nahnah;param1=x")
+    assert(builder.host === "srv.apache.org")
+    assert(builder.port === 15002)
+    assert(builder.token.contains("nahnah"))
+    assert(builder.userId.contains("x127"))
+    assert(builder.options === Map(("user_name", "Q"), ("param1", "x")))
+  }
+
+  test("Argument - use_ssl") {
+    val builder = build("--use_ssl")
+    assert(builder.sslEnabled)
+  }
+
+  test("Argument - option") {
+    val builder =
+      build("--option", "foo=bar", "--option", "c1=s8", "--option", 
"ns.sns.setting=baz")
+    assert(builder.options === Map(("foo", "bar"), ("c1", "s8"), 
("ns.sns.setting", "baz")))
+    val e1 = intercept[IllegalArgumentException](build("--option"))
+    assert(e1.getMessage.contains("requires a key-value pair"))
+    intercept[MatchError](build("--option", "not_a_config"))
+    val e2 = intercept[IllegalArgumentException](build("--option", "bar=baz=bak"))
+    assert(e2.getMessage.contains("should contain key=value"))
+  }
+
+  test("Argument - unsupported") {
+    val e = intercept[IllegalArgumentException](build("--unknown"))
+    assert(e.getMessage.contains("is an unsupported argument"))
+  }
+
+  test("SparkSession - create") {
+    {
+      val builder = build(
+        "--remote",
+        "sc://localhost:15033",
+        "--port",
+        "1507",
+        "--user_agent",
+        "U8912",
+        "--user_id",
+        "Q12")
+      assert(builder.host === "localhost")
+      assert(builder.port === 1507)
+      assert(builder.userAgent === "U8912")
+      assert(!builder.sslEnabled)
+      assert(builder.token.isEmpty)
+      assert(builder.userId.contains("Q12"))
+      assert(builder.userName.isEmpty)
+      assert(builder.options.isEmpty)
+    }
+    {
+      val builder = build(
+        "--use_ssl",
+        "--user_name",
+        "Nico",
+        "--option",
+        "mode=turbo",
+        "--option",
+        "cluster=mycl")
+      assert(builder.host === "localhost")
+      assert(builder.port === 15002)
+      assert(builder.userAgent == "_SPARK_CONNECT_SCALA")
+      assert(builder.sslEnabled)
+      assert(builder.token.isEmpty)
+      assert(builder.userId.isEmpty)
+      assert(builder.userName.contains("Nico"))
+      assert(builder.options === Map(("mode", "turbo"), ("cluster", "mycl")))
+    }
+    {
+      val builder = build("--token", "thisismysecret")
+      assert(builder.host === "localhost")
+      assert(builder.port === 15002)
+      assert(builder.userAgent === "_SPARK_CONNECT_SCALA")
+      assert(builder.sslEnabled)
+      assert(builder.token.contains("thisismysecret"))
+      assert(builder.userId.isEmpty)
+      assert(builder.userName.isEmpty)
+      assert(builder.options.isEmpty)
+    }
+  }
+}
diff --git a/pom.xml b/pom.xml
index 4f1f93f3492..6af47dd0eaf 100644
--- a/pom.xml
+++ b/pom.xml
@@ -221,6 +221,8 @@
     ./python/pyspark/sql/pandas/utils.py, and ./python/setup.py too.
     -->
     <arrow.version>11.0.0</arrow.version>
+    <ammonite.version>2.5.8</ammonite.version>
+
     <!-- org.fusesource.leveldbjni will be used except on arm64 platform. -->
     <leveldbjni.group>org.fusesource.leveldbjni</leveldbjni.group>
     <kubernetes-client.version>6.4.1</kubernetes-client.version>

