This is an automated email from the ASF dual-hosted git repository.

mridulm80 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 38fc127a232e [SPARK-46696][CORE] In ResourceProfileManager, function calls should occur after variable declarations
38fc127a232e is described below

commit 38fc127a232ed4005e6436cdef4c5a1f1bba95f7
Author: liangyongyuan <liangyongy...@xiaomi.com>
AuthorDate: Thu Jan 18 02:21:50 2024 -0600

    [SPARK-46696][CORE] In ResourceProfileManager, function calls should occur after variable declarations
    
    ### What changes were proposed in this pull request?
    In `ResourceProfileManager`, move the `defaultProfile` declaration and the `addResourceProfile(defaultProfile)` call so that they occur after the other member variable declarations.
    
    ### Why are the changes needed?
    As the title suggests, function calls in `ResourceProfileManager` should be made after the variable declarations they depend on. The constructor currently calls `addResourceProfile(defaultProfile)`, which invokes `isSupported`, before fields such as `dynamicEnabled` and `master` are initialized, so those fields still hold their JVM defaults (false for booleans, null for references). The end result happens to be correct, but the check runs against uninitialized state, as shown in the screenshot and the sketch below.
    
![image](https://github.com/apache/spark/assets/46274164/0e15b7e6-bd91-4d46-b220-758c131392c7)
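    
    For context, here is a minimal standalone Scala sketch (hypothetical demo code, not Spark code) of the behavior involved: statements in a class body execute in declaration order, so a method called from the constructor before later `val`s are declared sees their JVM defaults.
    
    ```scala
    // Hypothetical demo, not part of Spark: illustrates Scala constructor
    // statement ordering and uninitialized field defaults.
    class InitOrderDemo {
      check("before declarations") // prints: enabled=false, label=null
    
      private val enabled = true
      private val label = "ready"
    
      check("after declarations")  // prints: enabled=true, label=ready
    
      // Reads fields that are not yet initialized when this is called
      // above their declarations in the constructor body.
      private def check(when: String): Unit =
        println(s"$when: enabled=$enabled, label=$label")
    }
    
    object InitOrderDemo extends App {
      new InitOrderDemo
    }
    ```
    
    This is the same pattern that made `isSupported` run against default field values during construction, which is why the commit moves `addResourceProfile(defaultProfile)` below the other declarations instead of relying on those defaults.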
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Through existing unit tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #44705 from lyy-pineapple/SPARK-46696.
    
    Authored-by: liangyongyuan <liangyongy...@xiaomi.com>
    Signed-off-by: Mridul Muralidharan <mridul<at>gmail.com>
---
 .../org/apache/spark/resource/ResourceProfileManager.scala    | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/resource/ResourceProfileManager.scala b/core/src/main/scala/org/apache/spark/resource/ResourceProfileManager.scala
index afbacb801364..580a5b7bb07a 100644
--- a/core/src/main/scala/org/apache/spark/resource/ResourceProfileManager.scala
+++ b/core/src/main/scala/org/apache/spark/resource/ResourceProfileManager.scala
@@ -45,11 +45,6 @@ private[spark] class ResourceProfileManager(sparkConf: SparkConf,
     (lock.readLock(), lock.writeLock())
   }
 
-  private val defaultProfile = ResourceProfile.getOrCreateDefaultProfile(sparkConf)
-  addResourceProfile(defaultProfile)
-
-  def defaultResourceProfile: ResourceProfile = defaultProfile
-
   private val dynamicEnabled = Utils.isDynamicAllocationEnabled(sparkConf)
   private val master = sparkConf.getOption("spark.master")
   private val isYarn = master.isDefined && master.get.equals("yarn")
@@ -60,12 +55,18 @@ private[spark] class ResourceProfileManager(sparkConf: SparkConf,
   private val notRunningUnitTests = !isTesting
   private val testExceptionThrown = sparkConf.get(RESOURCE_PROFILE_MANAGER_TESTING)
 
+  private val defaultProfile = ResourceProfile.getOrCreateDefaultProfile(sparkConf)
+  addResourceProfile(defaultProfile)
+
+  def defaultResourceProfile: ResourceProfile = defaultProfile
+
   /**
    * If we use anything except the default profile, it's supported on YARN, Kubernetes and
    * Standalone with dynamic allocation enabled, and task resource profile with dynamic allocation
    * disabled on Standalone. Throw an exception if not supported.
    */
   private[spark] def isSupported(rp: ResourceProfile): Boolean = {
+    assert(master != null)
     if (rp.isInstanceOf[TaskResourceProfile] && !dynamicEnabled) {
       if ((notRunningUnitTests || testExceptionThrown) &&
         !(isStandaloneOrLocalCluster || isYarn || isK8s)) {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
