This is an automated email from the ASF dual-hosted git repository.

openinx pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/master by this push:
     new 9f07b83  Core: Fix the NPE in DataFiles.Builder#copy (#2852)
9f07b83 is described below

commit 9f07b83725e05206219368c020c3c772771a63d0
Author: openinx <[email protected]>
AuthorDate: Thu Jul 29 15:35:07 2021 +0800

    Core: Fix the NPE in DataFiles.Builder#copy (#2852)
---
 core/src/main/java/org/apache/iceberg/DataFiles.java                | 2 +-
 .../java/org/apache/iceberg/spark/source/TestSparkDataFile.java     | 6 +++++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/core/src/main/java/org/apache/iceberg/DataFiles.java b/core/src/main/java/org/apache/iceberg/DataFiles.java
index fc7e6fa..a765dc7 100644
--- a/core/src/main/java/org/apache/iceberg/DataFiles.java
+++ b/core/src/main/java/org/apache/iceberg/DataFiles.java
@@ -121,7 +121,7 @@ public class DataFiles {
     private FileFormat format = null;
     private long recordCount = -1L;
     private long fileSizeInBytes = -1L;
-    private int sortOrderId = SortOrder.unsorted().orderId();
+    private Integer sortOrderId = SortOrder.unsorted().orderId();
 
     // optional fields
     private Map<Integer, Long> columnSizes = null;
diff --git a/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
index 637ad7c..cffbab5 100644
--- a/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
+++ b/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java
@@ -28,6 +28,7 @@ import java.util.Map;
 import java.util.stream.Collectors;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.iceberg.DataFile;
+import org.apache.iceberg.DataFiles;
 import org.apache.iceberg.ManifestFile;
 import org.apache.iceberg.ManifestFiles;
 import org.apache.iceberg.ManifestReader;
@@ -160,7 +161,10 @@ public abstract class TestSparkDataFile {
 
     List<DataFile> dataFiles = Lists.newArrayList();
      try (ManifestReader<DataFile> reader = ManifestFiles.read(manifests.get(0), table.io())) {
-      reader.forEach(dataFile -> dataFiles.add(dataFile.copy()));
+      for (DataFile dataFile : reader) {
+        checkDataFile(dataFile.copy(), 
DataFiles.builder(table.spec()).copy(dataFile).build());
+        dataFiles.add(dataFile.copy());
+      }
     }
 
     Dataset<Row> dataFileDF = spark.read().format("iceberg").load(tableLocation + "#files");

Reply via email to