This is an automated email from the ASF dual-hosted git repository.

aokolnychyi pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new 8fcc891c2e Spark 3.4: Set metricsReporter for more scan types (#8445)
8fcc891c2e is described below

commit 8fcc891c2e49442381e039bc707864067f2c9c4a
Author: Zhen Wang <[email protected]>
AuthorDate: Thu Sep 14 23:32:42 2023 +0800

    Spark 3.4: Set metricsReporter for more scan types (#8445)
    
    Fixes #8444.
---
 .../org/apache/iceberg/spark/source/SparkScanBuilder.java    | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScanBuilder.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScanBuilder.java
index 55b0096bf6..09c09c6caa 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScanBuilder.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/source/SparkScanBuilder.java
@@ -480,7 +480,8 @@ public class SparkScanBuilder
             .fromSnapshotExclusive(startSnapshotId)
             .caseSensitive(caseSensitive)
             .filter(filterExpression())
-            .project(expectedSchema);
+            .project(expectedSchema)
+            .metricsReporter(metricsReporter);
 
     if (endSnapshotId != null) {
       scan = scan.toSnapshot(endSnapshotId);
@@ -559,7 +560,8 @@ public class SparkScanBuilder
             .newIncrementalChangelogScan()
             .caseSensitive(caseSensitive)
             .filter(filterExpression())
-            .project(expectedSchema);
+            .project(expectedSchema)
+            .metricsReporter(metricsReporter);
 
     if (startSnapshotId != null) {
       scan = scan.fromSnapshotExclusive(startSnapshotId);
@@ -629,7 +631,8 @@ public class SparkScanBuilder
             .useSnapshot(snapshotId)
             .caseSensitive(caseSensitive)
             .filter(filterExpression())
-            .project(expectedSchema);
+            .project(expectedSchema)
+            .metricsReporter(metricsReporter);
 
     scan = configureSplitPlanning(scan);
 
@@ -665,7 +668,8 @@ public class SparkScanBuilder
             .ignoreResiduals()
             .caseSensitive(caseSensitive)
             .filter(filterExpression())
-            .project(expectedSchema);
+            .project(expectedSchema)
+            .metricsReporter(metricsReporter);
 
     scan = configureSplitPlanning(scan);
 

Reply via email to