This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new bca5f3a0f09e [SPARK-53132][CORE][TESTS][FOLLOWUP] Use
`Utils.listFiles` in `BlockManagerDecommissionIntegrationSuite`
bca5f3a0f09e is described below
commit bca5f3a0f09e9020f4bda9b07ce70d7c685f9549
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Tue Aug 5 21:59:45 2025 -0700
[SPARK-53132][CORE][TESTS][FOLLOWUP] Use `Utils.listFiles` in
`BlockManagerDecommissionIntegrationSuite`
### What changes were proposed in this pull request?
This PR is a follow-up to fix the last instance which was missed due to the
new line between `FileUtils` and `.listFiles`.
- #51856
### Why are the changes needed?
To simplify the usage consistently.
```scala
- def shuffleFiles: Seq[File] = {
- FileUtils
- .listFiles(new File(sparkTempDir), Array("data", "index"), true)
- .asScala
- .toSeq
- }
+  def shuffleFiles: Seq[File] = Utils.listFiles(new File(sparkTempDir)).asScala
+    .filter(f => Array("data", "index").exists(f.getName.endsWith)).toSeq
```
### Does this PR introduce _any_ user-facing change?
No, this is a test case change.
### How was this patch tested?
Pass the CIs.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #51865 from dongjoon-hyun/SPARK-53132-2.
Authored-by: Dongjoon Hyun <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../storage/BlockManagerDecommissionIntegrationSuite.scala | 11 +++--------
1 file changed, 3 insertions(+), 8 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala
index 1c4c00c03a47..2b2a67c3c00a 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerDecommissionIntegrationSuite.scala
@@ -25,14 +25,13 @@ import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.jdk.CollectionConverters._
-import org.apache.commons.io.FileUtils
import org.scalatest.concurrent.Eventually
import org.apache.spark._
import org.apache.spark.internal.config
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.StandaloneSchedulerBackend
-import org.apache.spark.util.{ResetSystemProperties, SystemClock, ThreadUtils}
+import org.apache.spark.util.{ResetSystemProperties, SystemClock, ThreadUtils, Utils}
import org.apache.spark.util.ArrayImplicits._
class BlockManagerDecommissionIntegrationSuite extends SparkFunSuite with LocalSparkContext
@@ -361,12 +360,8 @@ class BlockManagerDecommissionIntegrationSuite extends SparkFunSuite with LocalS
val sparkTempDir = System.getProperty("java.io.tmpdir")
- def shuffleFiles: Seq[File] = {
- FileUtils
- .listFiles(new File(sparkTempDir), Array("data", "index"), true)
- .asScala
- .toSeq
- }
+    def shuffleFiles: Seq[File] = Utils.listFiles(new File(sparkTempDir)).asScala
+ .filter(f => Array("data", "index").exists(f.getName.endsWith)).toSeq
val existingShuffleFiles = shuffleFiles
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]