This is an automated email from the ASF dual-hosted git repository.

mingliang pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 2272efee85 [VL] Update user-facing metric name for `wallNanos` in batch scan (#10870)
2272efee85 is described below

commit 2272efee85d0a7d1667e49c2cc1cde18f4b3beb7
Author: Zouxxyy <[email protected]>
AuthorDate: Tue Oct 14 08:55:00 2025 +0800

    [VL] Update user-facing metric name for `wallNanos` in batch scan (#10870)
---
 .../scala/org/apache/gluten/backendsapi/velox/VeloxMetricsApi.scala   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxMetricsApi.scala b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxMetricsApi.scala
index 12c66a6a1a..57ccd3446e 100644
--- a/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxMetricsApi.scala
+++ b/backends-velox/src/main/scala/org/apache/gluten/backendsapi/velox/VeloxMetricsApi.scala
@@ -95,9 +95,9 @@ class VeloxMetricsApi extends MetricsApi with Logging {
       "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of 
output rows"),
       "outputVectors" -> SQLMetrics.createMetric(sparkContext, "number of 
output vectors"),
       "outputBytes" -> SQLMetrics.createSizeMetric(sparkContext, "number of 
output bytes"),
-      "wallNanos" -> SQLMetrics.createNanoTimingMetric(sparkContext, "time of 
batch scan"),
+      "wallNanos" -> SQLMetrics.createNanoTimingMetric(sparkContext, "time of 
scan and filter"),
       "cpuCount" -> SQLMetrics.createMetric(sparkContext, "cpu wall time 
count"),
-      "scanTime" -> SQLMetrics.createNanoTimingMetric(sparkContext, "scan 
time"),
+      "scanTime" -> SQLMetrics.createNanoTimingMetric(sparkContext, "time of 
scan"),
       "peakMemoryBytes" -> SQLMetrics.createSizeMetric(sparkContext, "peak 
memory bytes"),
       "numMemoryAllocations" -> SQLMetrics.createMetric(
         sparkContext,


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to