This is an automated email from the ASF dual-hosted git repository.
zuston pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-uniffle.git
The following commit(s) were added to refs/heads/master by this push:
new 91191e62d [#1087] feat(spark)(gluten): Support dynamic allocation for
Gluten Uniffle (#1649)
91191e62d is described below
commit 91191e62dc9d3fdd929de67bb6bb4f179989fd6d
Author: summaryzb <[email protected]>
AuthorDate: Tue Apr 16 19:03:53 2024 +0800
[#1087] feat(spark)(gluten): Support dynamic allocation for Gluten Uniffle
(#1649)
### What changes were proposed in this pull request?
Support dynamic allocation for Uniffle and Gluten when using Spark 3.2,
Spark 3.3, or Spark 3.4
### Why are the changes needed?
The ShuffleManager class name used in
`https://github.com/apache/incubator-gluten/tree/main/gluten-uniffle` differs
from `RssShuffleManager`
Fix: https://github.com/apache/incubator-uniffle/issues/1087
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
manual integration test
---
patch/spark/spark-3.2.1_dynamic_allocation_support.patch | 7 +++++--
patch/spark/spark-3.3.1_dynamic_allocation_support.patch | 9 ++++++---
patch/spark/spark-3.4.1_dynamic_allocation_support.patch | 7 +++++--
3 files changed, 16 insertions(+), 7 deletions(-)
diff --git a/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
b/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
index 81046dec0..815f7bdc6 100644
--- a/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.2.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@ diff --git
a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/mai
index 5f37a1abb19..af4bee1e1bb 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v
}.mkString("\n")
}
+ /**
+ * Return true if remote shuffle service is enabled.
+ */
-+ def isRssEnable(): Boolean = get("spark.shuffle.manager",
"sort").contains("RssShuffleManager")
++ def isRssEnable(): Boolean = {
++ val shuffleMgr = get("spark.shuffle.manager", "sort")
++ shuffleMgr.contains("RssShuffleManager") ||
shuffleMgr.contains("UniffleShuffleManager")
++ }
}
private[spark] object SparkConf extends Logging {
diff --git a/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
b/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
index 194e873cd..517555d81 100644
--- a/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.3.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@ diff --git
a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/mai
index 5f37a1abb19..af4bee1e1bb 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v
}.mkString("\n")
}
-
+
+ /**
+ * Return true if remote shuffle service is enabled.
+ */
-+ def isRssEnable(): Boolean = get("spark.shuffle.manager",
"sort").contains("RssShuffleManager")
++ def isRssEnable(): Boolean = {
++ val shuffleMgr = get("spark.shuffle.manager", "sort")
++ shuffleMgr.contains("RssShuffleManager") ||
shuffleMgr.contains("UniffleShuffleManager")
++ }
}
private[spark] object SparkConf extends Logging {
diff --git a/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
b/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
index 2f79bba3b..0ea65123e 100644
--- a/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
+++ b/patch/spark/spark-3.4.1_dynamic_allocation_support.patch
@@ -34,14 +34,17 @@ diff --git
a/core/src/main/scala/org/apache/spark/SparkConf.scala b/core/src/mai
index 08344d8e547..ff3bab6710d 100644
--- a/core/src/main/scala/org/apache/spark/SparkConf.scala
+++ b/core/src/main/scala/org/apache/spark/SparkConf.scala
-@@ -580,6 +580,10 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
+@@ -580,6 +580,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable
with Logging with Seria
Utils.redact(this, getAll).sorted.map { case (k, v) => k + "=" + v
}.mkString("\n")
}
+ /**
+ * Return true if remote shuffle service is enabled.
+ */
-+ def isRssEnable(): Boolean = get("spark.shuffle.manager",
"sort").contains("RssShuffleManager")
++ def isRssEnable(): Boolean = {
++ val shuffleMgr = get("spark.shuffle.manager", "sort")
++ shuffleMgr.contains("RssShuffleManager") ||
shuffleMgr.contains("UniffleShuffleManager")
++ }
}
private[spark] object SparkConf extends Logging {