This is an automated email from the ASF dual-hosted git repository.
chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git
The following commit(s) were added to refs/heads/master by this push:
new dfbb6069c [KYUUBI #6518] Support extracting URL for Spark 4 on YARN
dfbb6069c is described below
commit dfbb6069cadfe7d6b44f85de91319d393e87b7a4
Author: Cheng Pan <[email protected]>
AuthorDate: Thu Jul 4 22:57:02 2024 +0800
[KYUUBI #6518] Support extracting URL for Spark 4 on YARN
# :mag: Description
## Issue References 🔗
SPARK-48238 replaced YARN's AmIpFilter with a forked implementation, so the
URL-extraction code must check the new filter's configuration key as well.
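For reviewers, the change boils down to probing both filter parameter keys before falling back to the driver UI address. Below is a minimal, self-contained sketch of that lookup order; the `ProxyUrlResolution` object and the plain `Map` standing in for `SparkConf` are illustrative, not part of the patch:

```scala
object ProxyUrlResolution {
  // Key registered by the Hadoop-provided AmIpFilter (Spark 3.5 or before).
  val spark3Key: String =
    "spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_URI_BASES"
  // Key registered by Spark's forked AmIpFilter (Spark 4.0 or later, see SPARK-48238).
  val spark4Key: String =
    "spark.org.apache.spark.deploy.yarn.AmIpFilter.param.PROXY_URI_BASES"

  // Prefer the YARN proxy URI (old key first, then new key),
  // then the driver UI address, then the empty string.
  def resolve(conf: Map[String, String], uiWebUrl: Option[String]): String =
    conf.get(spark3Key)
      .orElse(conf.get(spark4Key))
      .orElse(uiWebUrl)
      .getOrElse("")
}
```

For example, `ProxyUrlResolution.resolve(Map(ProxyUrlResolution.spark4Key -> "http://rm:8088/proxy/app_1"), Some("http://driver:4040"))` picks the YARN proxy address on a Spark 4 cluster, while an empty conf falls back to the driver UI.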
## Types of changes :bookmark:
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing
functionality to change)
## Test Plan 🧪
Review.
---
# Checklist 📝
- [x] This patch was not authored or co-authored using [Generative
Tooling](https://www.apache.org/legal/generative-tooling.html)
**Be nice. Be informative.**
Closes #6518 from pan3793/spark-4-url.
Closes #6518
c5026500b [Cheng Pan] Support extracting URL for Spark 4 on YARN
Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Cheng Pan <[email protected]>
---
.../apache/kyuubi/engine/spark/KyuubiSparkUtil.scala | 17 ++++++++++++++---
1 file changed, 14 insertions(+), 3 deletions(-)
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
index 90212d4dc..8c19ea5c2 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/KyuubiSparkUtil.scala
@@ -57,9 +57,20 @@ object KyuubiSparkUtil extends Logging {
   def engineId: String = globalSparkContext.applicationId
   def engineName: String = globalSparkContext.appName
-  def engineUrl: String = globalSparkContext.getConf.getOption(
-    "spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_URI_BASES")
-    .orElse(globalSparkContext.uiWebUrl).getOrElse("")
+  def engineUrl: String = {
+    val sparkConf = globalSparkContext.getConf
+    sparkConf
+      // scalastyle:off line.size.limit
+      // format: off
+      // for Spark 3.5 or before
+      .getOption("spark.org.apache.hadoop.yarn.server.webproxy.amfilter.AmIpFilter.param.PROXY_URI_BASES")
+      // for Spark 4.0 or later, see SPARK-48238
+      .orElse(sparkConf.getOption("spark.org.apache.spark.deploy.yarn.AmIpFilter.param.PROXY_URI_BASES"))
+      // format: on
+      // scalastyle:on line.size.limit
+      .orElse(globalSparkContext.uiWebUrl)
+      .getOrElse("")
+  }
 
   def deployMode: String = {
     if (globalSparkContext.getConf.getBoolean("spark.kubernetes.submitInDriver", false)) {
       "cluster"