This is an automated email from the ASF dual-hosted git repository.

blue pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new c63a906  Rename ORC reader project config for consistency (#879)
c63a906 is described below

commit c63a906147eaa0a686bed763816e8230501b5dce
Author: dingxiaokun <[email protected]>
AuthorDate: Tue Mar 31 05:07:37 2020 +0800

    Rename ORC reader project config for consistency (#879)
---
 data/src/main/java/org/apache/iceberg/data/TableScanIterable.java     | 2 +-
 .../java/org/apache/iceberg/data/orc/TestGenericReadProjection.java   | 2 +-
 orc/src/main/java/org/apache/iceberg/orc/ORC.java                     | 4 ++--
 .../src/main/java/org/apache/iceberg/spark/source/RowDataReader.java  | 2 +-
 .../test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java   | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/data/src/main/java/org/apache/iceberg/data/TableScanIterable.java b/data/src/main/java/org/apache/iceberg/data/TableScanIterable.java
index 75c479e..4ce36cd 100644
--- a/data/src/main/java/org/apache/iceberg/data/TableScanIterable.java
+++ b/data/src/main/java/org/apache/iceberg/data/TableScanIterable.java
@@ -103,7 +103,7 @@ class TableScanIterable extends CloseableGroup implements CloseableIterable<Reco
 
       case ORC:
         ORC.ReadBuilder orc = ORC.read(input)
-                .schema(projection)
+                .project(projection)
                 .createReaderFunc(fileSchema -> GenericOrcReader.buildReader(projection, fileSchema))
                 .split(task.start(), task.length());
 
diff --git a/data/src/test/java/org/apache/iceberg/data/orc/TestGenericReadProjection.java b/data/src/test/java/org/apache/iceberg/data/orc/TestGenericReadProjection.java
index 9f92fa4..ccb1346 100644
--- a/data/src/test/java/org/apache/iceberg/data/orc/TestGenericReadProjection.java
+++ b/data/src/test/java/org/apache/iceberg/data/orc/TestGenericReadProjection.java
@@ -46,7 +46,7 @@ public class TestGenericReadProjection extends TestReadProjection {
     }
 
     Iterable<Record> records = ORC.read(Files.localInput(file))
-        .schema(readSchema)
+        .project(readSchema)
         .createReaderFunc(fileSchema -> GenericOrcReader.buildReader(readSchema, fileSchema))
         .build();
 
diff --git a/orc/src/main/java/org/apache/iceberg/orc/ORC.java b/orc/src/main/java/org/apache/iceberg/orc/ORC.java
index fc88fa9..7a3e0ce 100644
--- a/orc/src/main/java/org/apache/iceberg/orc/ORC.java
+++ b/orc/src/main/java/org/apache/iceberg/orc/ORC.java
@@ -148,8 +148,8 @@ public class ORC {
       return this;
     }
 
-    public ReadBuilder schema(org.apache.iceberg.Schema projectSchema) {
-      this.schema = projectSchema;
+    public ReadBuilder project(Schema newSchema) {
+      this.schema = newSchema;
       return this;
     }
 
diff --git a/spark/src/main/java/org/apache/iceberg/spark/source/RowDataReader.java b/spark/src/main/java/org/apache/iceberg/spark/source/RowDataReader.java
index ff5efea..3e4c213 100644
--- a/spark/src/main/java/org/apache/iceberg/spark/source/RowDataReader.java
+++ b/spark/src/main/java/org/apache/iceberg/spark/source/RowDataReader.java
@@ -185,7 +185,7 @@ class RowDataReader extends BaseDataReader<InternalRow> {
       FileScanTask task,
       Schema readSchema) {
     return ORC.read(location)
-        .schema(readSchema)
+        .project(readSchema)
         .split(task.start(), task.length())
         .createReaderFunc(SparkOrcReader::new)
         .caseSensitive(caseSensitive)
diff --git a/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java b/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
index 4f1b136..fefdce9 100644
--- a/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
+++ b/spark/src/test/java/org/apache/iceberg/spark/data/TestSparkOrcReader.java
@@ -49,7 +49,7 @@ public class TestSparkOrcReader extends AvroDataTest {
     }
 
     try (CloseableIterable<InternalRow> reader = ORC.read(Files.localInput(testFile))
-        .schema(schema)
+        .project(schema)
         .createReaderFunc(SparkOrcReader::new)
         .build()) {
       final Iterator<InternalRow> actualRows = reader.iterator();

Reply via email to