This is an automated email from the ASF dual-hosted git repository.

wangzhen pushed a commit to branch branch-1.8
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/branch-1.8 by this push:
     new 8c1c2edf0 [KYUUBI #5766] Default `spark.yarn.maxAppAttempts` to 1 for spark engine
8c1c2edf0 is described below

commit 8c1c2edf0fadc29df2902dff7fed6871526b134c
Author: wforget <[email protected]>
AuthorDate: Thu Dec 7 20:05:54 2023 +0800

    [KYUUBI #5766] Default `spark.yarn.maxAppAttempts` to 1 for spark engine
    
    # :mag: Description
    ## Issue References 🔗
    
    This pull request fixes #5766
    
    ## Describe Your Solution 🔧
    
    As discussed in https://github.com/apache/kyuubi/issues/5766#issuecomment-1835285025, we should add `spark.yarn.maxAppAttempts=1` for the Spark engine when `spark.master` is `yarn`.
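    
    As a quick illustration (not the actual patch; `YarnDefaultsSketch` and `defaultYarnMaxAppAttempts` are hypothetical names used only here), the intended defaulting rule looks roughly like the sketch below; the real change lives in `SparkProcessBuilder.extraYarnConf` in the diff that follows:
    
    ```scala
    // Hypothetical standalone sketch of the defaulting rule: when the cluster
    // manager is YARN and the user has not set spark.yarn.maxAppAttempts,
    // default it to 1 to avoid invalid extra application attempts.
    object YarnDefaultsSketch {
      def defaultYarnMaxAppAttempts(conf: Map[String, String]): Map[String, String] = {
        val isYarn = conf.get("spark.master")
          .exists(_.toLowerCase(java.util.Locale.ROOT).startsWith("yarn"))
        if (isYarn && !conf.contains("spark.yarn.maxAppAttempts")) {
          conf + ("spark.yarn.maxAppAttempts" -> "1")
        } else {
          conf
        }
      }
    }
    ```
    
    For example, `YarnDefaultsSketch.defaultYarnMaxAppAttempts(Map("spark.master" -> "yarn"))` yields a map containing `spark.yarn.maxAppAttempts -> 1`, while a value the user set explicitly is left untouched.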
    
    ## Types of changes :bookmark:
    
    - [X] Bugfix (non-breaking change which fixes an issue)
    - [ ] New feature (non-breaking change which adds functionality)
    - [ ] Breaking change (fix or feature that would cause existing functionality to change)
    
    ## Test Plan 🧪
    
    #### Behavior Without This Pull Request :coffin:
    
    #### Behavior With This Pull Request :tada:
    
    #### Related Unit Tests
    
    ---
    
    # Checklists
    ## 📝 Author Self Checklist
    
    - [x] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
    - [x] I have performed a self-review
    - [x] I have commented my code, particularly in hard-to-understand areas
    - [ ] I have made corresponding changes to the documentation
    - [x] My changes generate no new warnings
    - [ ] I have added tests that prove my fix is effective or that my feature works
    - [ ] New and existing unit tests pass locally with my changes
    - [x] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)
    
    ## 📝 Committer Pre-Merge Checklist
    
    - [x] Pull request title is okay.
    - [x] No license issues.
    - [x] Milestone correctly set?
    - [ ] Test coverage is ok
    - [x] Assignees are selected.
    - [x] Minimum number of approvals
    - [x] No changes are requested
    
    **Be nice. Be informative.**
    
    Closes #5798 from wForget/KYUUBI-5766-2.
    
    Closes #5766
    
    6477dfdf0 [wforget] fix
    c50f656d8 [wforget] fix order
    dbc1891e7 [wforget] comment
    a493e29cc [wforget] fix style
    4fa0651fa [wforget] fix test
    b899646ba [wforget] add test
    954a30de8 [wforget] [KYUUBI #5766] Default `spark.yarn.maxAppAttempts` to 1 for spark engine
    
    Authored-by: wforget <[email protected]>
    Signed-off-by: wforget <[email protected]>
    (cherry picked from commit 6a282fc5e9585e8715906846e10bd17adfc6eda7)
    Signed-off-by: wforget <[email protected]>
---
 .../kyuubi/engine/spark/SparkProcessBuilder.scala      | 18 ++++++++++++++++--
 .../kyuubi/engine/spark/SparkProcessBuilderSuite.scala | 14 ++++++++++++++
 2 files changed, 30 insertions(+), 2 deletions(-)

diff --git a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
index d147e5290..972284f5c 100644
--- a/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
+++ b/kyuubi-server/src/main/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilder.scala
@@ -140,8 +140,9 @@ class SparkProcessBuilder(
       allConf = allConf ++ zkAuthKeytabFileConf(allConf)
     }
     // pass spark engine log path to spark conf
-    (allConf ++ engineLogPathConf ++ appendPodNameConf(allConf)).foreach { case (k, v) =>
-      buffer ++= confKeyValue(convertConfigKey(k), v)
+    (allConf ++ engineLogPathConf ++ extraYarnConf(allConf) ++ appendPodNameConf(allConf)).foreach {
+      case (k, v) =>
+        buffer ++= confKeyValue(convertConfigKey(k), v)
     }
 
     setupKerberos(buffer)
@@ -258,6 +259,18 @@ class SparkProcessBuilder(
     map.result().toMap
   }
 
+  def extraYarnConf(conf: Map[String, String]): Map[String, String] = {
+    val map = mutable.Map.newBuilder[String, String]
+    if (clusterManager().exists(_.toLowerCase(Locale.ROOT).startsWith("yarn"))) {
+      if (!conf.contains(YARN_MAX_APP_ATTEMPTS_KEY)) {
+        // Set `spark.yarn.maxAppAttempts` to 1 to avoid invalid attempts.
+        // As mentioned in YARN-5617, it is improved after hadoop `2.8.2/2.9.0/3.0.0`.
+        map += (YARN_MAX_APP_ATTEMPTS_KEY -> "1")
+      }
+    }
+    map.result().toMap
+  }
+
   override def clusterManager(): Option[String] = {
     conf.getOption(MASTER_KEY).orElse(defaultsConf.get(MASTER_KEY))
   }
@@ -308,6 +321,7 @@ object SparkProcessBuilder {
   final val KUBERNETES_NAMESPACE_KEY = "spark.kubernetes.namespace"
   final val KUBERNETES_DRIVER_POD_NAME = "spark.kubernetes.driver.pod.name"
   final val KUBERNETES_EXECUTOR_POD_NAME_PREFIX = "spark.kubernetes.executor.podNamePrefix"
+  final val YARN_MAX_APP_ATTEMPTS_KEY = "spark.yarn.maxAppAttempts"
   final val INTERNAL_RESOURCE = "spark-internal"
 
   /**
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
index 6b498628b..8cbbed5af 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/SparkProcessBuilderSuite.scala
@@ -424,6 +424,20 @@ class SparkProcessBuilderSuite extends KerberizedTestHelper with MockitoSugar {
           }
     }
   }
+
+  test("default spark.yarn.maxAppAttempts conf in yarn mode") {
+    val conf1 = KyuubiConf(false)
+    conf1.set("spark.master", "k8s://test:12345")
+    val builder1 = new SparkProcessBuilder("", conf1)
+    val commands1 = builder1.toString.split(' ')
+    assert(!commands1.contains("spark.yarn.maxAppAttempts"))
+
+    val conf2 = KyuubiConf(false)
+    conf2.set("spark.master", "yarn")
+    val builder2 = new SparkProcessBuilder("", conf2)
+    val commands2 = builder2.toString.split(' ')
+    assert(commands2.contains("spark.yarn.maxAppAttempts=1"))
+  }
 }
 
 class FakeSparkProcessBuilder(config: KyuubiConf)
