This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch branch-1.9
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/branch-1.9 by this push:
     new bd33b7b17 [KYUUBI #6390] Temporarily disable UI Tab for Spark 4.0 and above
bd33b7b17 is described below

commit bd33b7b176cf11a647650798456f51c405de9f81
Author: Cheng Pan <[email protected]>
AuthorDate: Mon May 20 17:20:38 2024 +0800

    [KYUUBI #6390] Temporarily disable UI Tab for Spark 4.0 and above
    
    # :mag: Description
    
    Spark 4.0 migrated from `javax.servlet` to `jakarta.servlet` in SPARK-47118, which breaks the Kyuubi UI tab.
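    
    Below is a minimal, hedged sketch (not Kyuubi code) of why the namespace switch is binary-incompatible: UI pages compiled against the `javax.servlet` API cannot link on a Spark 4.0 classpath, where SPARK-47118 moved the UI onto `jakarta.servlet`. The `ServletNamespaceProbe` object is a hypothetical helper added only for illustration; the two class names it probes are the real servlet API classes.
    
    ```scala
    object ServletNamespaceProbe {
      // Returns true if the named class can be loaded from the current classpath.
      private def classPresent(name: String): Boolean =
        try { Class.forName(name); true }
        catch { case _: ClassNotFoundException => false }
    
      def main(args: Array[String]): Unit = {
        // On a typical Spark 3.x driver classpath the javax check passes; a Spark 4.0
        // driver ships only the jakarta namespace, so UI code expecting javax fails.
        println(s"javax.servlet available:   ${classPresent("javax.servlet.http.HttpServletRequest")}")
        println(s"jakarta.servlet available: ${classPresent("jakarta.servlet.http.HttpServletRequest")}")
      }
    }
    ```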
    
    ## Describe Your Solution 🔧
    
    Temporarily disable UI Tab for Spark 4.0 and above.
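    
    As a usage sketch of the gate (assuming the Kyuubi util module that provides `org.apache.kyuubi.util.SemanticVersion` is on the classpath; `EngineTabGate` is a hypothetical name used only here), the check mirrors the one added in `SparkSQLEngine` below:
    
    ```scala
    import org.apache.spark.SPARK_VERSION
    
    import org.apache.kyuubi.util.SemanticVersion
    
    object EngineTabGate {
      def main(args: Array[String]): Unit = {
        // Same comparison as in SparkSQLEngine: gate UI registration on the runtime Spark version.
        if (SemanticVersion(SPARK_VERSION) >= "4.0") {
          println("Kyuubi UI does not support Spark 4.0 and above yet; skipping EngineTab")
        } else {
          println("Spark is below 4.0; the Kyuubi EngineTab can be registered")
        }
      }
    }
    ```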
    
    ## Types of changes :bookmark:
    
    - [ ] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    Built Kyuubi with Spark 3.5 and successfully launched the Spark engine with Spark 4.0.0-preview1.
    
    <img width="1339" alt="image" src="https://github.com/apache/kyuubi/assets/26535726/34a001e5-5c70-4928-bf28-743825c0ead3">
    
    ---
    
    # Checklist 📝
    
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    **Be nice. Be informative.**
    
    Closes #6390 from pan3793/ui-4.0.
    
    Closes #6390
    
    2abad8c35 [Cheng Pan] Temporarily disable UI Tab for Spark 4.0 and above
    
    Authored-by: Cheng Pan <[email protected]>
    Signed-off-by: Cheng Pan <[email protected]>
    (cherry picked from commit 70eadc15add3fe102c3a67a199d1796751eed0ca)
    Signed-off-by: Cheng Pan <[email protected]>
---
 .../apache/kyuubi/engine/spark/SparkSQLEngine.scala    | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
index 5ed67963b..c9011e73a 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/SparkSQLEngine.scala
@@ -27,7 +27,7 @@ import scala.util.control.NonFatal
 
 import com.google.common.annotations.VisibleForTesting
 import org.apache.hadoop.fs.Path
-import org.apache.spark.{ui, SparkConf}
+import org.apache.spark.{ui, SPARK_VERSION, SparkConf}
 import org.apache.spark.kyuubi.{SparkContextHelper, SparkSQLEngineEventListener, SparkSQLEngineListener}
 import org.apache.spark.kyuubi.SparkUtilsHelper.getLocalDir
 import org.apache.spark.sql.SparkSession
@@ -46,7 +46,7 @@ import org.apache.kyuubi.ha.HighAvailabilityConf._
 import org.apache.kyuubi.ha.client.RetryPolicies
 import org.apache.kyuubi.service.Serverable
 import org.apache.kyuubi.session.SessionHandle
-import org.apache.kyuubi.util.{SignalRegister, ThreadUtils}
+import org.apache.kyuubi.util.{SemanticVersion, SignalRegister, ThreadUtils}
 import org.apache.kyuubi.util.ThreadUtils.scheduleTolerableRunnableWithFixedDelay
 
 case class SparkSQLEngine(spark: SparkSession) extends Serverable("SparkSQLEngine") {
@@ -358,11 +358,15 @@ object SparkSQLEngine extends Logging {
         engine.start()
         val kvStore = SparkContextHelper.getKvStore(spark.sparkContext)
         val store = new EngineEventsStore(kvStore)
-        ui.EngineTab(
-          Some(engine),
-          SparkContextHelper.getSparkUI(spark.sparkContext),
-          store,
-          kyuubiConf)
+        if (SemanticVersion(SPARK_VERSION) >= "4.0") {
+          warn("Kyuubi UI does not support Spark 4.0 and above yet")
+        } else {
+          ui.EngineTab(
+            Some(engine),
+            SparkContextHelper.getSparkUI(spark.sparkContext),
+            store,
+            kyuubiConf)
+        }
         val event = EngineEvent(engine)
         info(event)
         EventBus.post(event)
