This is an automated email from the ASF dual-hosted git repository.

vhs pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hudi.git


The following commit(s) were added to refs/heads/master by this push:
     new e4bc9851bf58 Explicitly state the spark stage name (#18416)
e4bc9851bf58 is described below

commit e4bc9851bf58ddd189de036137a29e7615828baf
Author: Surya Prasanna <[email protected]>
AuthorDate: Sun Mar 29 23:45:34 2026 -0700

    Explicitly state the spark stage name (#18416)
---
 .../org/apache/hudi/metadata/FileSystemBackedTableMetadata.java  | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/hudi-common/src/main/java/org/apache/hudi/metadata/FileSystemBackedTableMetadata.java b/hudi-common/src/main/java/org/apache/hudi/metadata/FileSystemBackedTableMetadata.java
index a5c51b594641..2b882977ca8b 100644
--- a/hudi-common/src/main/java/org/apache/hudi/metadata/FileSystemBackedTableMetadata.java
+++ b/hudi-common/src/main/java/org/apache/hudi/metadata/FileSystemBackedTableMetadata.java
@@ -156,12 +156,16 @@ public class FileSystemBackedTableMetadata extends AbstractHoodieTableMetadata {
       needPushDownExpressions = false;
     }
 
+    int recursiveListingStep = 0;
     while (!pathsToList.isEmpty()) {
+      recursiveListingStep++;
       // TODO: Get the parallelism from HoodieWriteConfig
       int listingParallelism = Math.min(DEFAULT_LISTING_PARALLELISM, pathsToList.size());
+      String recursiveListingJobName =
+          String.format("%s recursive listing step %d", this.getClass().getSimpleName(), recursiveListingStep);
 
       // List all directories in parallel
-      engineContext.setJobStatus(this.getClass().getSimpleName(),
+      engineContext.setJobStatus(recursiveListingJobName,
           "Listing all partitions on " + this.tableName
               + " with prefix " + relativePathPrefix);
       // Need to use serializable file status here, see HUDI-5936
@@ -183,7 +187,8 @@ public class FileSystemBackedTableMetadata extends AbstractHoodieTableMetadata {
       if (!dirToFileListingPairs.isEmpty()) {
         // result below holds a list of pair. first entry in the pair optionally holds the deduced list of partitions.
         // and second entry holds optionally a directory path to be processed further.
-        engineContext.setJobStatus(this.getClass().getSimpleName(), "Processing listed partitions");
+        engineContext.setJobStatus(recursiveListingJobName,
+            "Processing recursively listed partitions on " + this.tableName);
         List<Pair<Option<String>, Option<StoragePath>>> result =
             engineContext.map(dirToFileListingPairs,
                 fileInfoPair -> {

Reply via email to