This is an automated email from the ASF dual-hosted git repository.

bowenliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new eeee5c1ae [KYUUBI #4959] [MINOR] Code improvements for Scala
eeee5c1ae is described below

commit eeee5c1ae359c28eddccef1158a2eaaae6e7da74
Author: liangbowen <[email protected]>
AuthorDate: Fri Jun 16 21:20:17 2023 +0800

    [KYUUBI #4959] [MINOR] Code improvements for Scala
    
    ### _Why are the changes needed?_
    
    - To improve Scala code with corrections, simplification, Scala style, and 
redundancy clean-up. No feature changes introduced.
    
    Corrections:
    - Class doesn't correspond to file name (SparkListenerExtensionTest)
    - Correct package name in ResultSetUtil and PySparkTests
    
    Improvements:
    - 'var' could be a 'val'
    - getOrElse(null) to orNull
    
    Cleanup & Simplification:
    - Redundant cast inspection
    - Redundant collection conversion
    - Simplify boolean expression
    - Redundant new on case class
    - Redundant return
    - Unnecessary parentheses
    - Unnecessary partial function
    - Simplifiable empty check
    - Anonymous function convertible to a method value
    
    Scala Style:
    - Constructing range for seq indices
    - Get and getOrElse to getOrElse
    - Convert expression to Single Abstract Method (SAM)
    - Scala unnecessary semicolon inspection
    - Map and getOrElse(false) to exists
    - Map and flatten to flatMap
    - Null initializer can be replaced by _
    - scaladoc link to method
    
    Other Improvements:
    - Replace map and getOrElse(true) with forall
    - Unit return type in the argument of map
    - Size to length on arrays and strings
    - Type check can be pattern matching
    - Java mutator method accessed as parameterless
    - Procedure syntax in method definition
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [ ] [Run 
test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests)
 locally before making a pull request
    
    Closes #4959 from bowenliang123/scala-Improve.
    
    Closes #4959
    
    2d36ff351 [liangbowen] code improvement for Scala
    
    Authored-by: liangbowen <[email protected]>
    Signed-off-by: liangbowen <[email protected]>
---
 .../kyuubi/sql/KyuubiSparkSQLAstBuilder.scala      |  3 +-
 .../org/apache/spark/sql/ZorderSuiteBase.scala     | 28 +++++++++--------
 .../spark/connector/hive/HiveTableCatalog.scala    |  2 +-
 .../connector/hive/read/FilePartitionReader.scala  |  2 +-
 .../hive/kyuubi/connector/HiveBridgeHelper.scala   |  2 +-
 .../connector/hive/ExternalCatalogPoolSuite.scala  |  4 +--
 .../spark/connector/hive/HiveCatalogSuite.scala    |  2 +-
 .../spark/connector/tpcds/TPCDSCatalogSuite.scala  |  2 +-
 .../spark/connector/tpch/TPCHCatalogSuite.scala    |  2 +-
 .../spark/connector/tpch/TPCHQuerySuite.scala      |  2 +-
 ...Test.scala => SparkListenerExtensionTest.scala} |  0
 .../kyuubi/engine/flink/result/ResultSet.scala     |  2 +-
 .../kyuubi/engine/flink/result/ResultSetUtil.scala |  2 +-
 .../flink/operation/FlinkOperationSuite.scala      |  2 +-
 .../apache/spark/kyuubi/SparkProgressMonitor.scala |  6 +---
 .../execution/arrow/KyuubiArrowConverters.scala    |  4 +--
 .../org/apache/spark/ui/EngineSessionPage.scala    |  4 +--
 .../operation/SparkArrowbasedOperationSuite.scala  |  2 +-
 .../SparkCatalogDatabaseOperationSuite.scala       |  2 +-
 .../spark/operation/SparkOperationSuite.scala      | 16 +++++-----
 .../engine/trino/session/TrinoSessionImpl.scala    |  4 +--
 .../kyuubi/engine/trino/TrinoStatementSuite.scala  |  6 ++--
 .../trino/operation/TrinoOperationSuite.scala      | 12 ++++----
 .../it/trino/server/TrinoFrontendSuite.scala       |  2 +-
 .../org/apache/kyuubi/config/ConfigBuilder.scala   |  2 +-
 .../apache/kyuubi/service/TFrontendService.scala   |  2 +-
 .../org/apache/kyuubi/session/SessionManager.scala |  6 ++--
 .../scala/org/apache/kyuubi/HiveEngineTests.scala  |  6 ++--
 .../authentication/PlainSASLServerSuite.scala      |  4 +--
 .../kyuubi/ctl/cli/ControlCliArguments.scala       |  4 +--
 .../org/apache/kyuubi/ctl/util/Tabulator.scala     |  4 +--
 .../apache/kyuubi/ctl/BatchCliArgumentsSuite.scala |  4 +--
 .../org/apache/kyuubi/ctl/TestPrematureExit.scala  |  6 ++--
 .../org/apache/kyuubi/events/EventBusSuite.scala   | 36 ++++++++++------------
 .../kyuubi/ha/client/DiscoveryClientTests.scala    |  2 +-
 .../kyuubi/server/api/v1/AdminResource.scala       |  8 ++---
 .../kyuubi/server/api/v1/ApiRootResource.scala     |  4 +--
 .../kyuubi/server/api/v1/OperationsResource.scala  |  2 +-
 .../kyuubi/server/api/v1/SessionsResource.scala    |  7 ++---
 .../http/authentication/AuthenticationFilter.scala |  4 +--
 .../authentication/AuthenticationHandler.scala     |  4 +--
 .../KerberosAuthenticationHandler.scala            |  4 +--
 .../server/http/authentication/KerberosUtil.scala  |  2 +-
 .../kyuubi/server/trino/api/TrinoContext.scala     |  2 +-
 .../server/trino/api/v1/StatementResource.scala    |  2 +-
 .../org/apache/kyuubi/session/KyuubiSession.scala  |  4 +--
 .../kyuubi/sql/plan/command/RunnableCommand.scala  |  2 +-
 .../org/apache/kyuubi/util/KubernetesUtils.scala   |  2 +-
 .../scala/org/apache/kyuubi/util/Validator.scala   |  2 +-
 .../org/apache/kyuubi/engine/EngineRefTests.scala  |  4 +--
 .../apache/kyuubi/engine/spark/PySparkTests.scala  |  6 ++--
 .../engine/spark/SparkProcessBuilderSuite.scala    | 20 ++++++------
 .../KyuubiOperationThriftHttpPerUserSuite.scala    |  2 +-
 .../kyuubi/server/api/v1/AdminResourceSuite.scala  |  4 +--
 .../server/api/v1/BatchesResourceSuite.scala       |  2 +-
 .../authentication/AuthenticationFilterSuite.scala |  2 +-
 .../server/rest/client/AdminRestApiSuite.scala     |  2 +-
 .../server/rest/client/SessionRestApiSuite.scala   |  5 ++-
 .../trino/api/v1/StatementResourceSuite.scala      |  9 ++----
 59 files changed, 139 insertions(+), 155 deletions(-)

diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
 
b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
index 9f1958b09..82e3e6da5 100644
--- 
a/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
+++ 
b/extensions/spark/kyuubi-extension-spark-common/src/main/scala/org/apache/kyuubi/sql/KyuubiSparkSQLAstBuilder.scala
@@ -81,7 +81,6 @@ abstract class KyuubiSparkSQLAstBuilderBase extends 
KyuubiSparkSQLBaseVisitor[An
     val zorderCols = ctx.zorderClause().order.asScala
       .map(visitMultipartIdentifier)
       .map(UnresolvedAttribute(_))
-      .toSeq
 
     val orderExpr =
       if (zorderCols.length == 1) {
@@ -381,7 +380,7 @@ abstract class KyuubiSparkSQLAstBuilderBase extends 
KyuubiSparkSQLBaseVisitor[An
   private def stringToDate(s: UTF8String): Option[Int] = {
     def isValidDigits(segment: Int, digits: Int): Boolean = {
       // An integer is able to represent a date within [+-]5 million years.
-      var maxDigitsYear = 7
+      val maxDigitsYear = 7
       (segment == 0 && digits >= 4 && digits <= maxDigitsYear) ||
       (segment != 0 && digits > 0 && digits <= 2)
     }
diff --git 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
 
b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
index b24533e69..f48d11e15 100644
--- 
a/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
+++ 
b/extensions/spark/kyuubi-extension-spark-common/src/test/scala/org/apache/spark/sql/ZorderSuiteBase.scala
@@ -245,20 +245,22 @@ trait ZorderSuiteBase extends KyuubiSparkSQLExtensionTest 
with ExpressionEvalHel
       resHasSort: Boolean): Unit = {
     def checkSort(plan: LogicalPlan): Unit = {
       assert(plan.isInstanceOf[Sort] === resHasSort)
-      if (plan.isInstanceOf[Sort]) {
-        val colArr = cols.split(",")
-        val refs =
-          if (colArr.length == 1) {
-            plan.asInstanceOf[Sort].order.head
-              .child.asInstanceOf[AttributeReference] :: Nil
-          } else {
-            plan.asInstanceOf[Sort].order.head
-              .child.asInstanceOf[Zorder].children.map(_.references.head)
+      plan match {
+        case sort: Sort =>
+          val colArr = cols.split(",")
+          val refs =
+            if (colArr.length == 1) {
+              sort.order.head
+                .child.asInstanceOf[AttributeReference] :: Nil
+            } else {
+              sort.order.head
+                .child.asInstanceOf[Zorder].children.map(_.references.head)
+            }
+          assert(refs.size === colArr.size)
+          refs.zip(colArr).foreach { case (ref, col) =>
+            assert(ref.name === col.trim)
           }
-        assert(refs.size === colArr.size)
-        refs.zip(colArr).foreach { case (ref, col) =>
-          assert(ref.name === col.trim)
-        }
+        case _ =>
       }
     }
 
diff --git 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
index d4e0f5ea2..1199987b4 100644
--- 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/HiveTableCatalog.scala
@@ -259,7 +259,7 @@ class HiveTableCatalog(sparkSession: SparkSession)
   private def toOptions(properties: Map[String, String]): Map[String, String] 
= {
     properties.filterKeys(_.startsWith(TableCatalog.OPTION_PREFIX)).map {
       case (key, value) => key.drop(TableCatalog.OPTION_PREFIX.length) -> value
-    }.toMap
+    }
   }
 
   override def listNamespaces(): Array[Array[String]] = {
diff --git 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/FilePartitionReader.scala
 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/FilePartitionReader.scala
index d0cd680d4..8ac90b3fe 100644
--- 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/FilePartitionReader.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/read/FilePartitionReader.scala
@@ -31,7 +31,7 @@ import org.apache.spark.sql.internal.SQLConf
 // scalastyle:on line.size.limit
 class FilePartitionReader[T](readers: Iterator[HivePartitionedFileReader[T]])
   extends PartitionReader[T] with Logging {
-  private var currentReader: HivePartitionedFileReader[T] = null
+  private var currentReader: HivePartitionedFileReader[T] = _
 
   private val sqlConf = SQLConf.get
   private def ignoreMissingFiles = sqlConf.ignoreMissingFiles
diff --git 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/spark/sql/hive/kyuubi/connector/HiveBridgeHelper.scala
 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/spark/sql/hive/kyuubi/connector/HiveBridgeHelper.scala
index 349edd327..305c1450e 100644
--- 
a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/spark/sql/hive/kyuubi/connector/HiveBridgeHelper.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/spark/sql/hive/kyuubi/connector/HiveBridgeHelper.scala
@@ -79,7 +79,7 @@ object HiveBridgeHelper {
             s"Unsupported partition transform: $transform")
       }
 
-      (identityCols.toSeq, bucketSpec)
+      (identityCols, bucketSpec)
     }
   }
 
diff --git 
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/ExternalCatalogPoolSuite.scala
 
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/ExternalCatalogPoolSuite.scala
index 7c02e8531..937e32d6d 100644
--- 
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/ExternalCatalogPoolSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/ExternalCatalogPoolSuite.scala
@@ -56,11 +56,11 @@ class ExternalCatalogPoolSuite extends KyuubiHiveTest {
       val externalCatalog2 = pool.take(catalog2)
 
       assert(externalCatalog1 != externalCatalog2)
-      (1 to 10).foreach { id =>
+      (1 to 10).foreach { _ =>
         assert(pool.take(catalog1) == externalCatalog1)
       }
 
-      (1 to 10).foreach { id =>
+      (1 to 10).foreach { _ =>
         assert(pool.take(catalog2) == externalCatalog2)
       }
     }
diff --git 
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
 
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
index 7a1eb86dc..9088a6cfe 100644
--- 
a/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-hive/src/test/scala/org/apache/kyuubi/spark/connector/hive/HiveCatalogSuite.scala
@@ -95,7 +95,7 @@ class HiveCatalogSuite extends KyuubiHiveTest {
   }
 
   test("get catalog name") {
-    withSparkSession() { spark =>
+    withSparkSession() { _ =>
       val catalog = new HiveTableCatalog
       val catalogName = "hive"
       catalog.initialize(catalogName, CaseInsensitiveStringMap.empty())
diff --git 
a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
 
b/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
index 55a7fa3e9..451cee135 100644
--- 
a/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-tpcds/src/test/scala/org/apache/kyuubi/spark/connector/tpcds/TPCDSCatalogSuite.scala
@@ -35,7 +35,7 @@ class TPCDSCatalogSuite extends KyuubiFunSuite {
       .set("spark.sql.catalog.tpcds", classOf[TPCDSCatalog].getName)
       .set("spark.sql.cbo.enabled", "true")
       .set("spark.sql.cbo.planStats.enabled", "true")
-    withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { 
spark =>
+    withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { _ 
=>
       val catalog = new TPCDSCatalog
       val catalogName = "test"
       catalog.initialize(catalogName, CaseInsensitiveStringMap.empty())
diff --git 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
 
b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
index 0fdfc2689..802c8d690 100644
--- 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHCatalogSuite.scala
@@ -35,7 +35,7 @@ class TPCHCatalogSuite extends KyuubiFunSuite {
       .set("spark.sql.catalog.tpch", classOf[TPCHCatalog].getName)
       .set("spark.sql.cbo.enabled", "true")
       .set("spark.sql.cbo.planStats.enabled", "true")
-    withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { 
spark =>
+    withSparkSession(SparkSession.builder.config(sparkConf).getOrCreate()) { _ 
=>
       val catalog = new TPCHCatalog
       val catalogName = "test"
       catalog.initialize(catalogName, CaseInsensitiveStringMap.empty())
diff --git 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHQuerySuite.scala
 
b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHQuerySuite.scala
index efeaeb36c..88495f037 100644
--- 
a/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHQuerySuite.scala
+++ 
b/extensions/spark/kyuubi-spark-connector-tpch/src/test/scala/org/apache/kyuubi/spark/connector/tpch/TPCHQuerySuite.scala
@@ -54,7 +54,7 @@ class TPCHQuerySuite extends KyuubiFunSuite {
   val queries: Set[String] = (1 to 22).map(i => s"q$i").toSet
 
   test("run query on tiny") {
-    val viewSuffix = "view";
+    val viewSuffix = "view"
     val sparkConf = new SparkConf().setMaster("local[*]")
       .set("spark.ui.enabled", "false")
       .set("spark.sql.catalogImplementation", "in-memory")
diff --git 
a/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtenstionTest.scala
 
b/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala
similarity index 100%
rename from 
extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtenstionTest.scala
rename to 
extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala
diff --git 
a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSet.scala
 
b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSet.scala
index b90be09ff..1e94042d0 100644
--- 
a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSet.scala
+++ 
b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSet.scala
@@ -69,7 +69,7 @@ object ResultSet {
       .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
       .columns(Column.physical("result", DataTypes.STRING()))
       .data(data)
-      .build;
+      .build
   }
 
   def builder: Builder = new ResultSet.Builder
diff --git 
a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSetUtil.scala
 
b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSetUtil.scala
index d6bc2bada..c1169528c 100644
--- 
a/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSetUtil.scala
+++ 
b/externals/kyuubi-flink-sql-engine/src/main/scala/org/apache/kyuubi/engine/flink/result/ResultSetUtil.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.kyuubi.engine.flink.result;
+package org.apache.kyuubi.engine.flink.result
 
 import scala.collection.convert.ImplicitConversions._
 import scala.collection.mutable.ListBuffer
diff --git 
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
 
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
index 39d17aa7b..1b27ae974 100644
--- 
a/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
+++ 
b/externals/kyuubi-flink-sql-engine/src/test/scala/org/apache/kyuubi/engine/flink/operation/FlinkOperationSuite.scala
@@ -1035,7 +1035,7 @@ abstract class FlinkOperationSuite extends 
HiveJDBCTestHelper with WithFlinkTest
       assert(metadata.getColumnType(1) === java.sql.Types.VARCHAR)
       assert(resultSet.next())
       assert(resultSet.getString(1).length == 32)
-    };
+    }
   }
 
   test("execute statement - streaming insert into") {
diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkProgressMonitor.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkProgressMonitor.scala
index a46cbecc2..1d9ef53ea 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkProgressMonitor.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/kyuubi/SparkProgressMonitor.scala
@@ -136,12 +136,8 @@ class SparkProgressMonitor(spark: SparkSession, jobGroup: 
String) {
       trimmedVName = s.substring(0, COLUMN_1_WIDTH - 2)
       trimmedVName += ".."
     } else trimmedVName += " "
-    val result = new StringBuilder(trimmedVName)
     val toFill = (spaceRemaining * percent).toInt
-    for (i <- 0 until toFill) {
-      result.append(".")
-    }
-    result.toString
+    s"$trimmedVName${"." * toFill}"
   }
 
   private def getCompletedStages: Int = {
diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/execution/arrow/KyuubiArrowConverters.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/execution/arrow/KyuubiArrowConverters.scala
index f78552602..5c4d7086f 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/execution/arrow/KyuubiArrowConverters.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/sql/execution/arrow/KyuubiArrowConverters.scala
@@ -128,7 +128,7 @@ object KyuubiArrowConverters extends SQLConfHelper with 
Logging {
     val n = collectLimitExec.limit
     val schema = collectLimitExec.schema
     if (n == 0) {
-      return new Array[Batch](0)
+      new Array[Batch](0)
     } else {
       val limitScaleUpFactor = Math.max(conf.limitScaleUpFactor, 2)
       // TODO: refactor and reuse the code from RDD's take()
@@ -156,7 +156,7 @@ object KyuubiArrowConverters extends SQLConfHelper with 
Logging {
         }
 
         val partsToScan =
-          partsScanned.until(math.min(partsScanned + numPartsToTry, 
totalParts).toInt)
+          partsScanned.until(math.min(partsScanned + numPartsToTry, 
totalParts))
 
         // TODO: SparkPlan.session introduced in SPARK-35798, replace with 
SparkPlan.session once we
         // drop Spark-3.1.x support.
diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala
index 1f34ae64f..8ae830a84 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/spark/ui/EngineSessionPage.scala
@@ -42,7 +42,7 @@ case class EngineSessionPage(parent: EngineTab)
     require(parameterId != null && parameterId.nonEmpty, "Missing id 
parameter")
 
     val content = store.synchronized { // make sure all parts in this page are 
consistent
-      val sessionStat = store.getSession(parameterId).getOrElse(null)
+      val sessionStat = store.getSession(parameterId).orNull
       require(sessionStat != null, "Invalid sessionID[" + parameterId + "]")
 
       val redactionPattern = parent.sparkUI match {
@@ -51,7 +51,7 @@ case class EngineSessionPage(parent: EngineTab)
       }
 
       val sessionPropertiesTable =
-        if (sessionStat.conf != null && !sessionStat.conf.isEmpty) {
+        if (sessionStat.conf != null && sessionStat.conf.nonEmpty) {
           val table = UIUtils.listingTable(
             propertyHeader,
             propertyRow,
diff --git 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
index 8818ae7a9..2e7e41a2a 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkArrowbasedOperationSuite.scala
@@ -583,7 +583,7 @@ class SparkArrowbasedOperationSuite extends 
WithSparkSQLEngine with SparkDataTyp
   }
 
   class SQLMetricsListener extends QueryExecutionListener {
-    var queryExecution: QueryExecution = null
+    var queryExecution: QueryExecution = _
     override def onSuccess(funcName: String, qe: QueryExecution, durationNs: 
Long): Unit = {
       queryExecution = qe
     }
diff --git 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkCatalogDatabaseOperationSuite.scala
 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkCatalogDatabaseOperationSuite.scala
index 69431266b..5ee01bda1 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkCatalogDatabaseOperationSuite.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkCatalogDatabaseOperationSuite.scala
@@ -61,7 +61,7 @@ class DummyCatalog extends CatalogPlugin {
     _name = name
   }
 
-  private var _name: String = null
+  private var _name: String = _
 
   override def name(): String = _name
 
diff --git 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
index 650bdabd9..f5d265422 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/operation/SparkOperationSuite.scala
@@ -306,28 +306,28 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
       val tFetchResultsReq1 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_NEXT, 1)
       val tFetchResultsResp1 = client.FetchResults(tFetchResultsReq1)
       assert(tFetchResultsResp1.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq1 = 
tFetchResultsResp1.getResults.getColumns.get(0).getI64Val.getValues.asScala.toSeq
+      val idSeq1 = 
tFetchResultsResp1.getResults.getColumns.get(0).getI64Val.getValues.asScala
       assertResult(Seq(0L))(idSeq1)
 
       // fetch next from first row
       val tFetchResultsReq2 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_NEXT, 1)
       val tFetchResultsResp2 = client.FetchResults(tFetchResultsReq2)
       assert(tFetchResultsResp2.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq2 = 
tFetchResultsResp2.getResults.getColumns.get(0).getI64Val.getValues.asScala.toSeq
+      val idSeq2 = 
tFetchResultsResp2.getResults.getColumns.get(0).getI64Val.getValues.asScala
       assertResult(Seq(1L))(idSeq2)
 
       // fetch prior from second row, expected got first row
       val tFetchResultsReq3 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_PRIOR, 1)
       val tFetchResultsResp3 = client.FetchResults(tFetchResultsReq3)
       assert(tFetchResultsResp3.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq3 = 
tFetchResultsResp3.getResults.getColumns.get(0).getI64Val.getValues.asScala.toSeq
+      val idSeq3 = 
tFetchResultsResp3.getResults.getColumns.get(0).getI64Val.getValues.asScala
       assertResult(Seq(0L))(idSeq3)
 
       // fetch first
       val tFetchResultsReq4 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_FIRST, 3)
       val tFetchResultsResp4 = client.FetchResults(tFetchResultsReq4)
       assert(tFetchResultsResp4.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq4 = 
tFetchResultsResp4.getResults.getColumns.get(0).getI64Val.getValues.asScala.toSeq
+      val idSeq4 = 
tFetchResultsResp4.getResults.getColumns.get(0).getI64Val.getValues.asScala
       assertResult(Seq(0L, 1L))(idSeq4)
     }
   }
@@ -349,7 +349,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
         val tFetchResultsResp1 = client.FetchResults(tFetchResultsReq1)
         assert(tFetchResultsResp1.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq1 = tFetchResultsResp1.getResults.getColumns.get(0)
-          .getI64Val.getValues.asScala.toSeq
+          .getI64Val.getValues.asScala
         assertResult(Seq(0L))(idSeq1)
 
         // fetch next from first row
@@ -357,7 +357,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
         val tFetchResultsResp2 = client.FetchResults(tFetchResultsReq2)
         assert(tFetchResultsResp2.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq2 = tFetchResultsResp2.getResults.getColumns.get(0)
-          .getI64Val.getValues.asScala.toSeq
+          .getI64Val.getValues.asScala
         assertResult(Seq(1L))(idSeq2)
 
         // fetch prior from second row, expected got first row
@@ -365,7 +365,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
         val tFetchResultsResp3 = client.FetchResults(tFetchResultsReq3)
         assert(tFetchResultsResp3.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq3 = tFetchResultsResp3.getResults.getColumns.get(0)
-          .getI64Val.getValues.asScala.toSeq
+          .getI64Val.getValues.asScala
         assertResult(Seq(0L))(idSeq3)
 
         // fetch first
@@ -373,7 +373,7 @@ class SparkOperationSuite extends WithSparkSQLEngine with 
HiveMetadataTests with
         val tFetchResultsResp4 = client.FetchResults(tFetchResultsReq4)
         assert(tFetchResultsResp4.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq4 = tFetchResultsResp4.getResults.getColumns.get(0)
-          .getI64Val.getValues.asScala.toSeq
+          .getI64Val.getValues.asScala
         assertResult(Seq(0L, 1L))(idSeq4)
       }
     }
diff --git 
a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala
 
b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala
index 6869e54dc..42b21fc29 100644
--- 
a/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala
+++ 
b/externals/kyuubi-trino-engine/src/main/scala/org/apache/kyuubi/engine/trino/session/TrinoSessionImpl.scala
@@ -51,8 +51,8 @@ class TrinoSessionImpl(
 
   var trinoContext: TrinoContext = _
   private var clientSession: ClientSession = _
-  private var catalogName: String = null
-  private var databaseName: String = null
+  private var catalogName: String = _
+  private var databaseName: String = _
 
   private val sessionEvent = TrinoSessionEvent(this)
 
diff --git 
a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
 
b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
index fc9f1af5f..dec753ad4 100644
--- 
a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
+++ 
b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/TrinoStatementSuite.scala
@@ -30,15 +30,15 @@ class TrinoStatementSuite extends WithTrinoContainerServer {
       assert(schema.size === 1)
       assert(schema(0).getName === "_col0")
 
-      assert(resultSet.toIterator.hasNext)
-      assert(resultSet.toIterator.next() === List(1))
+      assert(resultSet.hasNext)
+      assert(resultSet.next() === List(1))
 
       val trinoStatement2 = TrinoStatement(trinoContext, kyuubiConf, "show 
schemas")
       val schema2 = trinoStatement2.getColumns
       val resultSet2 = trinoStatement2.execute()
 
       assert(schema2.size === 1)
-      assert(resultSet2.toIterator.hasNext)
+      assert(resultSet2.hasNext)
     }
   }
 
diff --git 
a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
 
b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
index a6f125af5..90939a3e4 100644
--- 
a/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
+++ 
b/externals/kyuubi-trino-engine/src/test/scala/org/apache/kyuubi/engine/trino/operation/TrinoOperationSuite.scala
@@ -590,14 +590,14 @@ class TrinoOperationSuite extends WithTrinoEngine with 
TrinoQueryTests {
       val tFetchResultsReq1 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_NEXT, 1)
       val tFetchResultsResp1 = client.FetchResults(tFetchResultsReq1)
       assert(tFetchResultsResp1.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq1 = 
tFetchResultsResp1.getResults.getColumns.get(0).getI32Val.getValues.asScala.toSeq
+      val idSeq1 = 
tFetchResultsResp1.getResults.getColumns.get(0).getI32Val.getValues.asScala
       assertResult(Seq(0L))(idSeq1)
 
       // fetch next from first row
       val tFetchResultsReq2 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_NEXT, 1)
       val tFetchResultsResp2 = client.FetchResults(tFetchResultsReq2)
       assert(tFetchResultsResp2.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
-      val idSeq2 = 
tFetchResultsResp2.getResults.getColumns.get(0).getI32Val.getValues.asScala.toSeq
+      val idSeq2 = 
tFetchResultsResp2.getResults.getColumns.get(0).getI32Val.getValues.asScala
       assertResult(Seq(1L))(idSeq2)
 
       val tFetchResultsReq3 = new TFetchResultsReq(opHandle, 
TFetchOrientation.FETCH_PRIOR, 1)
@@ -607,7 +607,7 @@ class TrinoOperationSuite extends WithTrinoEngine with 
TrinoQueryTests {
       } else {
         assert(tFetchResultsResp3.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq3 =
-          
tFetchResultsResp3.getResults.getColumns.get(0).getI32Val.getValues.asScala.toSeq
+          
tFetchResultsResp3.getResults.getColumns.get(0).getI32Val.getValues.asScala
         assertResult(Seq(0L))(idSeq3)
       }
 
@@ -618,7 +618,7 @@ class TrinoOperationSuite extends WithTrinoEngine with 
TrinoQueryTests {
       } else {
         assert(tFetchResultsResp4.getStatus.getStatusCode === 
TStatusCode.SUCCESS_STATUS)
         val idSeq4 =
-          
tFetchResultsResp4.getResults.getColumns.get(0).getI32Val.getValues.asScala.toSeq
+          
tFetchResultsResp4.getResults.getColumns.get(0).getI32Val.getValues.asScala
         assertResult(Seq(0L, 1L))(idSeq4)
       }
     }
@@ -771,8 +771,8 @@ class TrinoOperationSuite extends WithTrinoEngine with 
TrinoQueryTests {
         assert(schema.size === 1)
         assert(schema(0).getName === "_col0")
 
-        assert(resultSet.toIterator.hasNext)
-        version = resultSet.toIterator.next().head.toString
+        assert(resultSet.hasNext)
+        version = resultSet.next().head.toString
       }
       version
     }
diff --git 
a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala
 
b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala
index 4a175a28b..7575bf8a9 100644
--- 
a/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala
+++ 
b/integration-tests/kyuubi-trino-it/src/test/scala/org/apache/kyuubi/it/trino/server/TrinoFrontendSuite.scala
@@ -73,7 +73,7 @@ class TrinoFrontendSuite extends WithKyuubiServer with 
SparkMetadataTests {
         statement.execute("SELECT 1")
       }
     } catch {
-      case NonFatal(e) =>
+      case NonFatal(_) =>
     }
   }
 }
diff --git 
a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/ConfigBuilder.scala 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/ConfigBuilder.scala
index 8d7501552..5f31fade3 100644
--- a/kyuubi-common/src/main/scala/org/apache/kyuubi/config/ConfigBuilder.scala
+++ b/kyuubi-common/src/main/scala/org/apache/kyuubi/config/ConfigBuilder.scala
@@ -151,7 +151,7 @@ private[kyuubi] case class ConfigBuilder(key: String) {
       }
     }
 
-    new TypedConfigBuilder(this, regexFromString(_, this.key), _.toString)
+    TypedConfigBuilder(this, regexFromString(_, this.key), _.toString)
   }
 }
 
diff --git 
a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala
index e541c37c0..7532beccb 100644
--- 
a/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala
+++ 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/service/TFrontendService.scala
@@ -609,7 +609,7 @@ abstract class TFrontendService(name: String)
         info(s"Session [$handle] disconnected without closing properly, close 
it now")
         try {
           val needToClose = be.sessionManager.getSession(handle).conf
-            .get(SESSION_CLOSE_ON_DISCONNECT.key).getOrElse("true").toBoolean
+            .getOrElse(SESSION_CLOSE_ON_DISCONNECT.key, "true").toBoolean
           if (needToClose) {
             be.closeSession(handle)
           } else {
diff --git 
a/kyuubi-common/src/main/scala/org/apache/kyuubi/session/SessionManager.scala 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/session/SessionManager.scala
index f91447f98..7e6c50199 100644
--- 
a/kyuubi-common/src/main/scala/org/apache/kyuubi/session/SessionManager.scala
+++ 
b/kyuubi-common/src/main/scala/org/apache/kyuubi/session/SessionManager.scala
@@ -209,11 +209,11 @@ abstract class SessionManager(name: String) extends 
CompositeService(name) {
         key
       }
 
-    if (_confRestrictMatchList.exists(normalizedKey.startsWith(_)) ||
+    if (_confRestrictMatchList.exists(normalizedKey.startsWith) ||
       _confRestrictList.contains(normalizedKey)) {
       throw KyuubiSQLException(s"$normalizedKey is a restrict key according to 
the server-side" +
         s" configuration, please remove it and retry if you want to proceed")
-    } else if (_confIgnoreMatchList.exists(normalizedKey.startsWith(_)) ||
+    } else if (_confIgnoreMatchList.exists(normalizedKey.startsWith) ||
       _confIgnoreList.contains(normalizedKey)) {
       warn(s"$normalizedKey is a ignored key according to the server-side 
configuration")
       None
@@ -228,7 +228,7 @@ abstract class SessionManager(name: String) extends 
CompositeService(name) {
 
   // validate whether if a batch key should be ignored
   def validateBatchKey(key: String, value: String): Option[(String, String)] = 
{
-    if (_batchConfIgnoreMatchList.exists(key.startsWith(_)) || 
_batchConfIgnoreList.contains(key)) {
+    if (_batchConfIgnoreMatchList.exists(key.startsWith) || 
_batchConfIgnoreList.contains(key)) {
       warn(s"$key is a ignored batch key according to the server-side 
configuration")
       None
     } else {
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
index 9eb4a2440..028f755f6 100644
--- a/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
+++ b/kyuubi-common/src/test/scala/org/apache/kyuubi/HiveEngineTests.scala
@@ -140,7 +140,7 @@ trait HiveEngineTests extends HiveJDBCTestHelper {
       try {
         val meta = statement.getConnection.getMetaData
         var resultSet = meta.getColumns(null, null, null, null)
-        var resultSetBuffer = ArrayBuffer[(String, String, String, String, 
String)]()
+        val resultSetBuffer = ArrayBuffer[(String, String, String, String, 
String)]()
         while (resultSet.next()) {
           resultSetBuffer += Tuple5(
             resultSet.getString(TABLE_CAT),
@@ -434,8 +434,8 @@ trait HiveEngineTests extends HiveJDBCTestHelper {
       val res = statement.getConnection.getMetaData.getClientInfoProperties
       assert(res.next())
       assert(res.getString(1) === "ApplicationName")
-      assert(res.getInt("MAX_LEN") === 1000);
-      assert(!res.next());
+      assert(res.getInt("MAX_LEN") === 1000)
+      assert(!res.next())
 
       val connection = statement.getConnection
       connection.setClientInfo("ApplicationName", "test kyuubi hive jdbc")
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLServerSuite.scala
 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLServerSuite.scala
index 78fe3ef7a..a7f4b9535 100644
--- 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLServerSuite.scala
+++ 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/service/authentication/PlainSASLServerSuite.scala
@@ -79,9 +79,7 @@ class PlainSASLServerSuite extends KyuubiFunSuite {
       "NONE",
       "KYUUBI",
       map,
-      new CallbackHandler {
-        override def handle(callbacks: Array[Callback]): Unit = {}
-      })
+      _ => {})
     val e6 = 
intercept[SaslException](server2.evaluateResponse(res4.map(_.toByte)))
     assert(e6.getMessage === "Error validating the login")
     assert(e6.getCause.getMessage === "Authentication failed")
diff --git 
a/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/cli/ControlCliArguments.scala 
b/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/cli/ControlCliArguments.scala
index 41d53b568..35b4ccacf 100644
--- 
a/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/cli/ControlCliArguments.scala
+++ 
b/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/cli/ControlCliArguments.scala
@@ -33,9 +33,9 @@ import org.apache.kyuubi.ctl.opt.{CliConfig, CommandLine, 
ControlAction, Control
 class ControlCliArguments(args: Seq[String], env: Map[String, String] = 
sys.env)
   extends ControlCliArgumentsParser with Logging {
 
-  var cliConfig: CliConfig = null
+  var cliConfig: CliConfig = _
 
-  var command: Command[_] = null
+  var command: Command[_] = _
 
   // Set parameters from command line arguments
   parse(args)
diff --git 
a/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/util/Tabulator.scala 
b/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/util/Tabulator.scala
index 704436289..70fed87f6 100644
--- a/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/util/Tabulator.scala
+++ b/kyuubi-ctl/src/main/scala/org/apache/kyuubi/ctl/util/Tabulator.scala
@@ -23,11 +23,11 @@ import org.apache.commons.lang3.StringUtils
 private[kyuubi] object Tabulator {
   def format(title: String, header: Array[String], rows: 
Array[Array[String]]): String = {
     val textTable = formatTextTable(header, rows)
-    val footer = s"${rows.size} row(s)\n"
+    val footer = s"${rows.length} row(s)\n"
     if (StringUtils.isBlank(title)) {
       textTable + footer
     } else {
-      val rowWidth = textTable.split("\n").head.size
+      val rowWidth = textTable.split("\n").head.length
       val titleNewLine = "\n" + StringUtils.center(title, rowWidth) + "\n"
       titleNewLine + textTable + footer
     }
diff --git 
a/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/BatchCliArgumentsSuite.scala 
b/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/BatchCliArgumentsSuite.scala
index 7563d985a..bf8f101e0 100644
--- 
a/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/BatchCliArgumentsSuite.scala
+++ 
b/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/BatchCliArgumentsSuite.scala
@@ -84,7 +84,7 @@ class BatchCliArgumentsSuite extends KyuubiFunSuite with 
TestPrematureExit {
       "-f",
       batchYamlFile)
     val opArgs = new ControlCliArguments(args)
-    assert(opArgs.cliConfig.batchOpts.waitCompletion == true)
+    assert(opArgs.cliConfig.batchOpts.waitCompletion)
   }
 
   test("submit batch without waitForCompletion") {
@@ -96,7 +96,7 @@ class BatchCliArgumentsSuite extends KyuubiFunSuite with 
TestPrematureExit {
       "--waitCompletion",
       "false")
     val opArgs = new ControlCliArguments(args)
-    assert(opArgs.cliConfig.batchOpts.waitCompletion == false)
+    assert(!opArgs.cliConfig.batchOpts.waitCompletion)
   }
 
   test("get/delete batch") {
diff --git 
a/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/TestPrematureExit.scala 
b/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/TestPrematureExit.scala
index 0e4cc1302..5f8107da7 100644
--- a/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/TestPrematureExit.scala
+++ b/kyuubi-ctl/src/test/scala/org/apache/kyuubi/ctl/TestPrematureExit.scala
@@ -34,7 +34,7 @@ trait TestPrematureExit {
 
   /** Simple PrintStream that reads data into a buffer */
   private class BufferPrintStream extends PrintStream(noOpOutputStream) {
-    var lineBuffer = ArrayBuffer[String]()
+    val lineBuffer = ArrayBuffer[String]()
     // scalastyle:off println
     override def println(line: Any): Unit = {
       lineBuffer += line.toString
@@ -52,11 +52,11 @@ trait TestPrematureExit {
 
     @volatile var exitedCleanly = false
     val original = mainObject.exitFn
-    mainObject.exitFn = (_) => exitedCleanly = true
+    mainObject.exitFn = _ => exitedCleanly = true
     try {
       @volatile var exception: Exception = null
       val thread = new Thread {
-        override def run() =
+        override def run(): Unit =
           try {
             mainObject.main(input)
           } catch {
diff --git 
a/kyuubi-events/src/test/scala/org/apache/kyuubi/events/EventBusSuite.scala 
b/kyuubi-events/src/test/scala/org/apache/kyuubi/events/EventBusSuite.scala
index 9c75766da..0a8563ee4 100644
--- a/kyuubi-events/src/test/scala/org/apache/kyuubi/events/EventBusSuite.scala
+++ b/kyuubi-events/src/test/scala/org/apache/kyuubi/events/EventBusSuite.scala
@@ -44,29 +44,29 @@ class EventBusSuite extends KyuubiFunSuite {
   }
 
   test("register event handler") {
-    var test0EventRecievedCount = 0
-    var test1EventRecievedCount = 0
-    var test2EventRecievedCount = 0
-    var testEventRecievedCount = 0
+    var test0EventReceivedCount = 0
+    var test1EventReceivedCount = 0
+    var test2EventReceivedCount = 0
+    var testEventReceivedCount = 0
     val liveBus = EventBus()
 
     liveBus.register[Test0KyuubiEvent] { e =>
       assert(e.content == "test0")
       assert(e.eventType == "test0_kyuubi")
-      test0EventRecievedCount += 1
+      test0EventReceivedCount += 1
     }
     liveBus.register[Test1KyuubiEvent] { e =>
       assert(e.content == "test1")
       assert(e.eventType == "test1_kyuubi")
-      test1EventRecievedCount += 1
+      test1EventReceivedCount += 1
     }
     // scribe subclass event
     liveBus.register[TestKyuubiEvent] { e =>
       assert(e.eventType == "test2_kyuubi")
-      test2EventRecievedCount += 1
+      test2EventReceivedCount += 1
     }
-    liveBus.register[KyuubiEvent] { e =>
-      testEventRecievedCount += 1
+    liveBus.register[KyuubiEvent] { _ =>
+      testEventReceivedCount += 1
     }
 
     class Test0Handler extends EventHandler[Test0KyuubiEvent] {
@@ -77,11 +77,9 @@ class EventBusSuite extends KyuubiFunSuite {
 
     liveBus.register[Test0KyuubiEvent](new Test0Handler)
 
-    liveBus.register[Test1KyuubiEvent](new EventHandler[Test1KyuubiEvent] {
-      override def apply(e: Test1KyuubiEvent): Unit = {
-        assert(e.content == "test1")
-      }
-    })
+    liveBus.register[Test1KyuubiEvent] { e =>
+      assert(e.content == "test1")
+    }
 
     (1 to 10) foreach { _ =>
       liveBus.post(Test0KyuubiEvent("test0"))
@@ -92,10 +90,10 @@ class EventBusSuite extends KyuubiFunSuite {
     (1 to 30) foreach { _ =>
       liveBus.post(Test2KyuubiEvent("name2", "test2"))
     }
-    assert(test0EventRecievedCount == 10)
-    assert(test1EventRecievedCount == 20)
-    assert(test2EventRecievedCount == 30)
-    assert(testEventRecievedCount == 60)
+    assert(test0EventReceivedCount == 10)
+    assert(test1EventReceivedCount == 20)
+    assert(test2EventReceivedCount == 30)
+    assert(testEventReceivedCount == 60)
   }
 
   test("register event handler for default bus") {
@@ -120,7 +118,7 @@ class EventBusSuite extends KyuubiFunSuite {
 
   test("async event handler") {
     val countDownLatch = new CountDownLatch(4)
-    val count = new AtomicInteger(0);
+    val count = new AtomicInteger(0)
     class Test0Handler extends EventHandler[Test0KyuubiEvent] {
       override def apply(e: Test0KyuubiEvent): Unit = {
         Thread.sleep(10)
diff --git 
a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
 
b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
index 87db340b5..a8523f7f9 100644
--- 
a/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
+++ 
b/kyuubi-ha/src/test/scala/org/apache/kyuubi/ha/client/DiscoveryClientTests.scala
@@ -162,7 +162,7 @@ trait DiscoveryClientTests extends KyuubiFunSuite {
 
   test("setData method test") {
     withDiscoveryClient(conf) { discoveryClient =>
-      val data = "abc";
+      val data = "abc"
       val path = "/setData_test"
       discoveryClient.create(path, "PERSISTENT")
       discoveryClient.setData(path, data.getBytes)
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/AdminResource.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/AdminResource.scala
index fc271b058..735efa71b 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/AdminResource.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/AdminResource.scala
@@ -127,9 +127,7 @@ private[v1] class AdminResource extends ApiRequestContext 
with Logging {
       val usersSet = users.split(",").toSet
       sessions = sessions.filter(session => usersSet.contains(session.user))
     }
-    sessions.map { case session =>
-      ApiUtils.sessionData(session.asInstanceOf[KyuubiSession])
-    }.toSeq
+    sessions.map(session => 
ApiUtils.sessionData(session.asInstanceOf[KyuubiSession])).toSeq
   }
 
   @ApiResponse(
@@ -259,7 +257,7 @@ private[v1] class AdminResource extends ApiRequestContext 
with Logging {
     val engine = getEngine(userName, engineType, shareLevel, subdomain, "")
     val engineSpace = getEngineSpace(engine)
 
-    var engineNodes = ListBuffer[ServiceNodeInfo]()
+    val engineNodes = ListBuffer[ServiceNodeInfo]()
     Option(subdomain).filter(_.nonEmpty) match {
       case Some(_) =>
         withDiscoveryClient(fe.getConf) { discoveryClient =>
@@ -368,6 +366,6 @@ private[v1] class AdminResource extends ApiRequestContext 
with Logging {
   }
 
   private def isAdministrator(userName: String): Boolean = {
-    administrators.contains(userName);
+    administrators.contains(userName)
   }
 }
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/ApiRootResource.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/ApiRootResource.scala
index fc3150355..8abc23ff1 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/ApiRootResource.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/ApiRootResource.scala
@@ -86,8 +86,8 @@ private[server] object ApiRootResource {
   def getEngineUIProxyHandler(fe: KyuubiRestFrontendService): 
ServletContextHandler = {
     val proxyServlet = new EngineUIProxyServlet()
     val holder = new ServletHolder(proxyServlet)
-    val proxyHandler = new 
ServletContextHandler(ServletContextHandler.NO_SESSIONS);
-    proxyHandler.setContextPath("/engine-ui");
+    val proxyHandler = new 
ServletContextHandler(ServletContextHandler.NO_SESSIONS)
+    proxyHandler.setContextPath("/engine-ui")
     proxyHandler.addServlet(holder, "/*")
     proxyHandler
   }
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/OperationsResource.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/OperationsResource.scala
index b55719749..b79ee27e3 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/OperationsResource.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/OperationsResource.scala
@@ -109,7 +109,7 @@ private[v1] class OperationsResource extends 
ApiRequestContext with Logging {
           var scale = 0
           if (tPrimitiveTypeEntry.getTypeQualifiers != null) {
             val qualifiers = 
tPrimitiveTypeEntry.getTypeQualifiers.getQualifiers
-            val defaultValue = TTypeQualifierValue.i32Value(0);
+            val defaultValue = TTypeQualifierValue.i32Value(0)
             precision = qualifiers.getOrDefault("precision", 
defaultValue).getI32Value
             scale = qualifiers.getOrDefault("scale", defaultValue).getI32Value
           }
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/SessionsResource.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/SessionsResource.scala
index d735b87d8..bc480c027 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/SessionsResource.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/api/v1/SessionsResource.scala
@@ -53,9 +53,8 @@ private[v1] class SessionsResource extends ApiRequestContext 
with Logging {
     description = "get the list of all live sessions")
   @GET
   def sessions(): Seq[SessionData] = {
-    sessionManager.allSessions().map { case session =>
-      ApiUtils.sessionData(session.asInstanceOf[KyuubiSession])
-    }.toSeq
+    sessionManager.allSessions()
+      .map(session => 
ApiUtils.sessionData(session.asInstanceOf[KyuubiSession])).toSeq
   }
 
   @ApiResponse(
@@ -86,7 +85,7 @@ private[v1] class SessionsResource extends ApiRequestContext 
with Logging {
             .startTime(event.startTime)
             .endTime(event.endTime)
             .totalOperations(event.totalOperations)
-            .exception(event.exception.getOrElse(null))
+            .exception(event.exception.orNull)
             .build).get
     } catch {
       case NonFatal(e) =>
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilter.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilter.scala
index 3c4065a7b..8fe245d6a 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilter.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilter.scala
@@ -88,7 +88,7 @@ class AuthenticationFilter(conf: KyuubiConf) extends Filter 
with Logging {
   /**
    * If the request has a valid authentication token it allows the request to 
continue to the
    * target resource, otherwise it triggers an authentication sequence using 
the configured
-   * {@link AuthenticationHandler}.
+   * [[AuthenticationHandler]].
    *
    * @param request     the request object.
    * @param response    the response object.
@@ -158,7 +158,7 @@ class AuthenticationFilter(conf: KyuubiConf) extends Filter 
with Logging {
   }
 
   override def destroy(): Unit = {
-    if (!authSchemeHandlers.isEmpty) {
+    if (authSchemeHandlers.nonEmpty) {
       authSchemeHandlers.values.foreach(_.destroy())
       authSchemeHandlers.clear()
     }
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationHandler.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationHandler.scala
index acbc52f35..bf2cb5bbe 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationHandler.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/AuthenticationHandler.scala
@@ -46,14 +46,14 @@ trait AuthenticationHandler {
   /**
    * Destroys the authentication handler instance.
    * <p>
-   * This method is invoked by the {@link AuthenticationFilter# destroy} 
method.
+   * This method is invoked by the [[AuthenticationFilter.destroy]] method.
    */
   def destroy(): Unit
 
   /**
    * Performs an authentication step for the given HTTP client request.
    * <p>
-   * This method is invoked by the {@link AuthenticationFilter} only if the 
HTTP client request is
+   * This method is invoked by the [[AuthenticationFilter]] only if the HTTP 
client request is
    * not yet authenticated.
    * <p>
    * Depending upon the authentication mechanism being implemented, a 
particular HTTP client may
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosAuthenticationHandler.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosAuthenticationHandler.scala
index 19a31feb6..04603f30a 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosAuthenticationHandler.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosAuthenticationHandler.scala
@@ -46,7 +46,7 @@ class KerberosAuthenticationHandler extends 
AuthenticationHandler with Logging {
   override val authScheme: AuthScheme = AuthSchemes.NEGOTIATE
 
   override def authenticationSupported: Boolean = {
-    !keytab.isEmpty && !principal.isEmpty
+    keytab.nonEmpty && principal.nonEmpty
   }
 
   override def init(conf: KyuubiConf): Unit = {
@@ -141,7 +141,7 @@ class KerberosAuthenticationHandler extends 
AuthenticationHandler with Logging {
         GSSCredential.ACCEPT_ONLY)
       gssContext = gssManager.createContext(gssCreds)
       val serverToken = gssContext.acceptSecContext(clientToken, 0, 
clientToken.length)
-      if (serverToken != null && serverToken.length > 0) {
+      if (serverToken != null && serverToken.nonEmpty) {
         val authenticate = Base64.getEncoder.encodeToString(serverToken)
         response.setHeader(WWW_AUTHENTICATE, s"$NEGOTIATE $authenticate")
       }
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosUtil.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosUtil.scala
index 8ff079373..a5b95678c 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosUtil.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/http/authentication/KerberosUtil.scala
@@ -201,7 +201,7 @@ object KerberosUtil {
     val names = ticket.get(0xA2, 0x30, 0xA1, 0x30)
     val sb = new StringBuilder
     while (names.hasNext) {
-      if (sb.length > 0) {
+      if (sb.nonEmpty) {
         sb.append('/')
       }
       sb.append(names.next.getAsString)
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/TrinoContext.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/TrinoContext.scala
index 16fc0388a..842f0ceec 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/TrinoContext.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/TrinoContext.scala
@@ -333,7 +333,7 @@ object TrinoContext {
 
     if (rowSet.getColumns == null) {
       return rowSet.getRows.asScala
-        .map(t => t.getColVals.asScala.map(v => 
v.getFieldValue.asInstanceOf[Object]).asJava)
+        .map(t => t.getColVals.asScala.map(v => v.getFieldValue).asJava)
         .asJava
     }
 
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/v1/StatementResource.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/v1/StatementResource.scala
index 124b84688..c6b5550cc 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/v1/StatementResource.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/server/trino/api/v1/StatementResource.scala
@@ -91,7 +91,7 @@ private[v1] class StatementResource extends ApiRequestContext 
with Logging {
           TrinoContext.buildTrinoResponse(qr, query.context)
         case ExecuteForPreparing(statementId, parameters) =>
           val parametersMap = new util.HashMap[Integer, String]()
-          for (i <- 0 until parameters.size) {
+          for (i <- parameters.indices) {
             parametersMap.put(i + 1, parameters(i))
           }
           trinoContext.preparedStatement.get(statementId).map { originSql =>
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSession.scala 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSession.scala
index 7316e367b..a4c345af3 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSession.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/session/KyuubiSession.scala
@@ -36,9 +36,9 @@ abstract class KyuubiSession(
 
   val sessionType: SessionType
 
-  val connectionUrl = conf.get(KYUUBI_SESSION_CONNECTION_URL_KEY).getOrElse("")
+  val connectionUrl = conf.getOrElse(KYUUBI_SESSION_CONNECTION_URL_KEY, "")
 
-  val realUser = conf.get(KYUUBI_SESSION_REAL_USER_KEY).getOrElse(user)
+  val realUser = conf.getOrElse(KYUUBI_SESSION_REAL_USER_KEY, user)
 
   def getSessionEvent: Option[KyuubiSessionEvent]
 
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/RunnableCommand.scala
 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/RunnableCommand.scala
index 54ca9f689..deda7d006 100644
--- 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/RunnableCommand.scala
+++ 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/sql/plan/command/RunnableCommand.scala
@@ -46,7 +46,7 @@ trait RunnableCommand extends KyuubiTreeNode {
     }
     val taken = iter.take(rowSetSize)
     val resultRowSet = RowSetHelper.toTRowSet(
-      taken.toList.asInstanceOf[List[Row]],
+      taken.toList,
       resultSchema,
       protocolVersion)
     resultRowSet.setStartRowOffset(iter.getPosition)
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/util/KubernetesUtils.scala 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/util/KubernetesUtils.scala
index 0c934b51d..f9780bb16 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/util/KubernetesUtils.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/util/KubernetesUtils.scala
@@ -102,7 +102,7 @@ object KubernetesUtils extends Logging {
   implicit private class OptionConfigurableConfigBuilder(val configBuilder: 
ConfigBuilder)
     extends AnyVal {
 
-    def withOption[T](option: Option[T])(configurator: ((T, ConfigBuilder) => 
ConfigBuilder))
+    def withOption[T](option: Option[T])(configurator: (T, ConfigBuilder) => 
ConfigBuilder)
         : ConfigBuilder = {
       option.map { opt =>
         configurator(opt, configBuilder)
diff --git 
a/kyuubi-server/src/main/scala/org/apache/kyuubi/util/Validator.scala 
b/kyuubi-server/src/main/scala/org/apache/kyuubi/util/Validator.scala
index 00eca3604..7bada5ebe 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/util/Validator.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/util/Validator.scala
@@ -38,7 +38,7 @@ object Validator {
 
   private val dns1123LabelFmt = "[a-z0-9]([-a-z0-9]*[a-z0-9])?"
 
-  private val podConfValidator = 
(s"^$dns1123LabelFmt(\\.$dns1123LabelFmt)*$$").r.pattern
+  private val podConfValidator = 
s"^$dns1123LabelFmt(\\.$dns1123LabelFmt)*$$".r.pattern
 
   val KUBERNETES_DNS_SUBDOMAIN_NAME_MAX_LENGTH = 253
 
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala
index 8b050684a..08b36b84a 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/EngineRefTests.scala
@@ -220,7 +220,7 @@ trait EngineRefTests extends KyuubiFunSuite {
     conf.set(HighAvailabilityConf.HA_NAMESPACE, "engine_test")
     conf.set(HighAvailabilityConf.HA_ADDRESSES, getConnectString())
     conf.set(ENGINE_POOL_SELECT_POLICY, "POLLING")
-    (0 until (10)).foreach { i =>
+    (0 until 10).foreach { i =>
       val engine7 = new EngineRef(conf, user, 
PluginLoader.loadGroupProvider(conf), id, null)
       val engineNumber = 
Integer.parseInt(engine7.subdomain.substring(pool_name.length + 1))
       assert(engineNumber == (i % conf.get(ENGINE_POOL_SIZE)))
@@ -285,7 +285,7 @@ trait EngineRefTests extends KyuubiFunSuite {
     val times = new Array[Long](3)
     val executor = Executors.newFixedThreadPool(3)
     try {
-      (0 until (3)).foreach { i =>
+      (0 until 3).foreach { i =>
         val cloned = conf.clone
         executor.execute(() => {
           DiscoveryClientProvider.withDiscoveryClient(cloned) { client =>
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
index 6af7e21e2..40f7b2f5e 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.kyuubi.engine.spark.operation
+package org.apache.kyuubi.engine.spark
 
 import java.io.PrintWriter
 import java.nio.file.Files
@@ -158,12 +158,12 @@ class PySparkTests extends WithKyuubiServer with 
HiveJDBCTestHelper {
     }
   }
 
-  private def withTempPyFile(code: String)(op: (String) => Unit): Unit = {
+  private def withTempPyFile(code: String)(op: String => Unit): Unit = {
     val tempPyFile = Files.createTempFile("", ".py").toFile
     try {
       new PrintWriter(tempPyFile) {
         write(code)
-        close
+        close()
       }
       op(tempPyFile.getPath)
     } finally {
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index 7b204dafb..e70acf8ad 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -165,17 +165,15 @@ class SparkProcessBuilderSuite extends 
KerberizedTestHelper with MockitoSugar {
 
         val config = KyuubiConf().set(KyuubiConf.ENGINE_LOG_TIMEOUT, 20000L)
         (1 to 10).foreach { _ =>
-          pool.execute(new Runnable {
-            override def run(): Unit = {
-              val pb = new FakeSparkProcessBuilder(config) {
-                override val workingDir: Path = fakeWorkDir
-              }
-              try {
-                val p = pb.start
-                p.waitFor()
-              } finally {
-                pb.close()
-              }
+          pool.execute(() => {
+            val pb = new FakeSparkProcessBuilder(config) {
+              override val workingDir: Path = fakeWorkDir
+            }
+            try {
+              val p = pb.start
+              p.waitFor()
+            } finally {
+              pb.close()
             }
           })
         }
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpPerUserSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpPerUserSuite.scala
index d30dd94a3..b475e75de 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpPerUserSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/operation/thrift/http/KyuubiOperationThriftHttpPerUserSuite.scala
@@ -34,5 +34,5 @@ class KyuubiOperationThriftHttpPerUserSuite extends 
KyuubiOperationPerUserSuite
     
s"jdbc:hive2://${server.frontendServices.head.connectionUrl}/;transportMode=http;"
 +
       s"httpPath=cliservice;"
 
-  override protected lazy val httpMode = true;
+  override protected lazy val httpMode = true
 }
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala
index f7a086de4..da9b8ae44 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/AdminResourceSuite.scala
@@ -289,7 +289,7 @@ class AdminResourceSuite extends KyuubiFunSuite with 
RestFrontendTestHelper {
       assert(200 == response.getStatus)
       assert(client.pathExists(engineSpace))
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        assert(client.getChildren(engineSpace).size == 0, s"refId same with 
$id?")
+        assert(client.getChildren(engineSpace).isEmpty, s"refId same with 
$id?")
       }
 
       // kill the engine application
@@ -335,7 +335,7 @@ class AdminResourceSuite extends KyuubiFunSuite with 
RestFrontendTestHelper {
       assert(200 == response.getStatus)
       assert(client.pathExists(engineSpace))
       eventually(timeout(5.seconds), interval(100.milliseconds)) {
-        assert(client.getChildren(engineSpace).size == 0, s"refId same with 
$id?")
+        assert(client.getChildren(engineSpace).isEmpty, s"refId same with 
$id?")
       }
 
       // kill the engine application
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
index 62a7fec13..8e0a80c4d 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/api/v1/BatchesResourceSuite.scala
@@ -376,7 +376,7 @@ class BatchesResourceSuite extends KyuubiFunSuite with 
RestFrontendTestHelper wi
     assert(response6.getStatus == 200)
     val getBatchListResponse6 = 
response6.readEntity(classOf[GetBatchesResponse])
     assert(getBatchListResponse6.getTotal == 1)
-    sessionManager.allSessions().map(_.close())
+    sessionManager.allSessions().foreach(_.close())
 
     val queryCreateTime = System.currentTimeMillis()
     val response7 = webTarget.path("api/v1/batches")
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilterSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilterSuite.scala
index 9a79d7922..de4b056ff 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilterSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/http/authentication/AuthenticationFilterSuite.scala
@@ -25,7 +25,7 @@ class AuthenticationFilterSuite extends KyuubiFunSuite {
   test("add auth handler and destroy") {
     val filter = new AuthenticationFilter(KyuubiConf())
     filter.addAuthHandler(new BasicAuthenticationHandler(null))
-    assert(filter.authSchemeHandlers.size == 0)
+    assert(filter.authSchemeHandlers.isEmpty)
     filter.addAuthHandler(new BasicAuthenticationHandler(AuthTypes.LDAP))
     assert(filter.authSchemeHandlers.size == 1)
     filter.addAuthHandler(new BasicAuthenticationHandler(AuthTypes.LDAP))
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala
index 91cd33e58..8479a2a3a 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/AdminRestApiSuite.scala
@@ -88,7 +88,7 @@ class AdminRestApiSuite extends RestClientTestHelper {
     assert(result == s"Engine ${engineSpace} is deleted successfully.")
 
     engines = adminRestApi.listEngines("spark_sql", "user", "default", 
"").asScala
-    assert(engines.size == 0)
+    assert(engines.isEmpty)
   }
 
   test("list/close session") {
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/SessionRestApiSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/SessionRestApiSuite.scala
index a1f0fc5ee..a1dfd2432 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/SessionRestApiSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/rest/client/SessionRestApiSuite.scala
@@ -28,7 +28,6 @@ import org.scalatest.concurrent.PatienceConfiguration.Timeout
 
 import org.apache.kyuubi.RestClientTestHelper
 import org.apache.kyuubi.client.{KyuubiRestClient, SessionRestApi}
-import org.apache.kyuubi.client.api.v1.dto
 import org.apache.kyuubi.client.api.v1.dto._
 import org.apache.kyuubi.client.exception.KyuubiRestException
 import org.apache.kyuubi.config.KyuubiConf
@@ -73,7 +72,7 @@ class SessionRestApiSuite extends RestClientTestHelper {
 
       // get session event
       val kyuubiEvent = sessionRestApi.getSessionEvent(
-        
sessionHandle.getIdentifier.toString).asInstanceOf[dto.KyuubiSessionEvent]
+        sessionHandle.getIdentifier.toString)
       assert(kyuubiEvent.getConf.get("testConfig").equals("testValue"))
       
assert(kyuubiEvent.getSessionType.equals(SessionType.INTERACTIVE.toString))
     }
@@ -169,7 +168,7 @@ class SessionRestApiSuite extends RestClientTestHelper {
   test("fix kyuubi session leak caused by engine stop") {
     withSessionRestApi { sessionRestApi =>
       // close all sessions
-      var sessions = sessionRestApi.listSessions().asScala
+      val sessions = sessionRestApi.listSessions().asScala
       sessions.foreach(session => 
sessionRestApi.closeSession(session.getIdentifier))
 
       // open new session
diff --git 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
index 44602759c..1ace58612 100644
--- 
a/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
+++ 
b/kyuubi-server/src/test/scala/org/apache/kyuubi/server/trino/api/v1/StatementResourceSuite.scala
@@ -51,9 +51,9 @@ class StatementResourceSuite extends KyuubiFunSuite with 
TrinoRestFrontendTestHe
 
     val trinoResponseIter = Iterator.iterate(TrinoResponse(response = 
Option(response)))(getData)
     val isErr = trinoResponseIter.takeWhile(_.isEnd == false).exists { t =>
-      t.queryError != None && t.response == None
+      t.queryError.isDefined && t.response.isEmpty
     }
-    assert(isErr == true)
+    assert(isErr)
   }
 
   test("statement submit and get result") {
@@ -61,10 +61,7 @@ class StatementResourceSuite extends KyuubiFunSuite with 
TrinoRestFrontendTestHe
       .request().post(Entity.entity("select 1", MediaType.TEXT_PLAIN_TYPE))
 
     val trinoResponseIter = Iterator.iterate(TrinoResponse(response = 
Option(response)))(getData)
-    val dataSet = trinoResponseIter
-      .takeWhile(_.isEnd == false)
-      .map(_.data)
-      .flatten.toList
+    val dataSet = trinoResponseIter.takeWhile(_.isEnd == 
false).flatMap(_.data).toList
     assert(dataSet == List(List(1)))
   }
 

Reply via email to