This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3badfd2c41d3 [SPARK-51026][CORE][TESTS] Remove the unnecessary test 
file `StagePageSuite.scala`
3badfd2c41d3 is described below

commit 3badfd2c41d33fe5f1cbd1d7345c749b7af81574
Author: yangjie01 <[email protected]>
AuthorDate: Wed Jan 29 12:36:35 2025 +0800

    [SPARK-51026][CORE][TESTS] Remove the unnecessary test file 
`StagePageSuite.scala`
    
    ### What changes were proposed in this pull request?
    This PR removes a test file `StagePageSuite.scala` that is no longer needed.
    
    ### Why are the changes needed?
    The current `StagePageSuite.scala` contains one test case and one private 
method. The reasons for cleaning them up are as follows:
    
    1. The assertion in the test case "ApiHelper.COLUMN_TO_INDEX should match 
headers of the task table" was cleaned up in SPARK-44490 | 
https://github.com/apache/spark/pull/42085. Currently, this test case only 
creates a `StageData` object and an `AppStatusStore` object, with no other 
operations or assertions. Moreover, the creation of these objects is also 
covered in other test cases, such as `KVStoreProtobufSerializerSuite`, 
`AppStatusStoreSuite`, etc. Therefore, this is already an unnecessary test case.
    
    2. The private method `renderStagePage` has not been used since SPARK-21809 
| https://github.com/apache/spark/pull/21688, so it can be cleaned up.
    
    After cleaning up the above content, `StagePageSuite.scala` becomes an 
empty file, so it can be completely deleted.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #49721 from LuciferYang/SPARK-51026.
    
    Authored-by: yangjie01 <[email protected]>
    Signed-off-by: yangjie01 <[email protected]>
---
 .../scala/org/apache/spark/ui/StagePageSuite.scala | 162 ---------------------
 1 file changed, 162 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala 
b/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
deleted file mode 100644
index c76c97d07141..000000000000
--- a/core/src/test/scala/org/apache/spark/ui/StagePageSuite.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.ui
-
-import scala.xml.Node
-
-import jakarta.servlet.http.HttpServletRequest
-import org.mockito.Mockito.{mock, when, RETURNS_SMART_NULLS}
-
-import org.apache.spark._
-import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics}
-import org.apache.spark.internal.config.Status._
-import org.apache.spark.resource.ResourceProfile
-import org.apache.spark.scheduler._
-import org.apache.spark.status.AppStatusStore
-import org.apache.spark.status.api.v1.{AccumulableInfo => UIAccumulableInfo, 
StageData, StageStatus}
-import org.apache.spark.ui.jobs.{StagePage, StagesTab}
-
-class StagePageSuite extends SparkFunSuite with LocalSparkContext {
-
-  private val peakExecutionMemory = 10
-
-  test("ApiHelper.COLUMN_TO_INDEX should match headers of the task table") {
-    val conf = new SparkConf(false).set(LIVE_ENTITY_UPDATE_PERIOD, 0L)
-    val statusStore = AppStatusStore.createLiveStore(conf)
-    try {
-      val stageData = new StageData(
-        status = StageStatus.ACTIVE,
-        stageId = 1,
-        attemptId = 1,
-        numTasks = 1,
-        numActiveTasks = 1,
-        numCompleteTasks = 1,
-        numFailedTasks = 1,
-        numKilledTasks = 1,
-        numCompletedIndices = 1,
-
-        submissionTime = None,
-        firstTaskLaunchedTime = None,
-        completionTime = None,
-        failureReason = None,
-
-        executorDeserializeTime = 1L,
-        executorDeserializeCpuTime = 1L,
-        executorRunTime = 1L,
-        executorCpuTime = 1L,
-        resultSize = 1L,
-        jvmGcTime = 1L,
-        resultSerializationTime = 1L,
-        memoryBytesSpilled = 1L,
-        diskBytesSpilled = 1L,
-        peakExecutionMemory = 1L,
-        inputBytes = 1L,
-        inputRecords = 1L,
-        outputBytes = 1L,
-        outputRecords = 1L,
-        shuffleRemoteBlocksFetched = 1L,
-        shuffleLocalBlocksFetched = 1L,
-        shuffleFetchWaitTime = 1L,
-        shuffleRemoteBytesRead = 1L,
-        shuffleRemoteBytesReadToDisk = 1L,
-        shuffleLocalBytesRead = 1L,
-        shuffleReadBytes = 1L,
-        shuffleReadRecords = 1L,
-        shuffleCorruptMergedBlockChunks = 1L,
-        shuffleMergedFetchFallbackCount = 1L,
-        shuffleMergedRemoteBlocksFetched = 1L,
-        shuffleMergedLocalBlocksFetched = 1L,
-        shuffleMergedRemoteChunksFetched = 1L,
-        shuffleMergedLocalChunksFetched = 1L,
-        shuffleMergedRemoteBytesRead = 1L,
-        shuffleMergedLocalBytesRead = 1L,
-        shuffleRemoteReqsDuration = 1L,
-        shuffleMergedRemoteReqsDuration = 1L,
-        shuffleWriteBytes = 1L,
-        shuffleWriteTime = 1L,
-        shuffleWriteRecords = 1L,
-
-        name = "stage1",
-        description = Some("description"),
-        details = "detail",
-        schedulingPool = "pool1",
-
-        rddIds = Seq(1),
-        accumulatorUpdates = Seq(new UIAccumulableInfo(0L, "acc", None, 
"value")),
-        tasks = None,
-        executorSummary = None,
-        speculationSummary = None,
-        killedTasksSummary = Map.empty,
-        ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID,
-        peakExecutorMetrics = None,
-        taskMetricsDistributions = None,
-        executorMetricsDistributions = None,
-        isShufflePushEnabled = false,
-        shuffleMergersCount = 0
-      )
-    } finally {
-      statusStore.close()
-    }
-  }
-
-  /**
-   * Render a stage page started with the given conf and return the HTML.
-   * This also runs a dummy stage to populate the page with useful content.
-   */
-  private def renderStagePage(): Seq[Node] = {
-    val conf = new SparkConf(false).set(LIVE_ENTITY_UPDATE_PERIOD, 0L)
-    val statusStore = AppStatusStore.createLiveStore(conf)
-    val listener = statusStore.listener.get
-
-    try {
-      val tab = mock(classOf[StagesTab], RETURNS_SMART_NULLS)
-      when(tab.store).thenReturn(statusStore)
-
-      val request = mock(classOf[HttpServletRequest])
-      when(tab.conf).thenReturn(conf)
-      when(tab.appName).thenReturn("testing")
-      when(tab.headerTabs).thenReturn(Seq.empty)
-      when(request.getParameter("id")).thenReturn("0")
-      when(request.getParameter("attempt")).thenReturn("0")
-      val page = new StagePage(tab, statusStore)
-
-      // Simulate a stage in job progress listener
-      val stageInfo = new StageInfo(0, 0, "dummy", 1, Seq.empty, Seq.empty, 
"details",
-        resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID)
-      // Simulate two tasks to test PEAK_EXECUTION_MEMORY correctness
-      (1 to 2).foreach {
-        taskId =>
-          val taskInfo = new TaskInfo(taskId, taskId, 0, taskId, 0,
-            "0", "localhost", TaskLocality.ANY, false)
-          listener.onStageSubmitted(SparkListenerStageSubmitted(stageInfo))
-          listener.onTaskStart(SparkListenerTaskStart(0, 0, taskInfo))
-          taskInfo.markFinished(TaskState.FINISHED, System.currentTimeMillis())
-          val taskMetrics = TaskMetrics.empty
-          val executorMetrics = new ExecutorMetrics
-          taskMetrics.incPeakExecutionMemory(peakExecutionMemory)
-          listener.onTaskEnd(SparkListenerTaskEnd(0, 0, "result", Success, 
taskInfo,
-            executorMetrics, taskMetrics))
-      }
-      listener.onStageCompleted(SparkListenerStageCompleted(stageInfo))
-      page.render(request)
-    } finally {
-      statusStore.close()
-    }
-  }
-
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to