This is an automated email from the ASF dual-hosted git repository.
huaxingao pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/main by this push:
new 6e873baa7d Test: Avoid deprecated AvroParquetWriter.builder(Path file) (#14620)
6e873baa7d is described below
commit 6e873baa7db22429fa231d552a696248f64ea1f7
Author: Yuya Ebihara <[email protected]>
AuthorDate: Thu Nov 20 09:18:27 2025 +0900
Test: Avoid deprecated AvroParquetWriter.builder(Path file) (#14620)
* Test: Avoid deprecated AvroParquetWriter.builder(Path file)
* fixup! Test: Avoid deprecated AvroParquetWriter.builder(Path file)
---
.../test/java/org/apache/iceberg/data/parquet/TestGenericData.java | 5 +++--
.../iceberg/data/parquet/TestParquetEncryptionWithWriteSupport.java | 5 +++--
.../java/org/apache/iceberg/flink/data/TestFlinkParquetReader.java | 4 ++--
parquet/src/test/java/org/apache/iceberg/parquet/TestParquet.java | 3 ++-
4 files changed, 10 insertions(+), 7 deletions(-)
diff --git a/data/src/test/java/org/apache/iceberg/data/parquet/TestGenericData.java b/data/src/test/java/org/apache/iceberg/data/parquet/TestGenericData.java
index f19f972829..8c0e2e903a 100644
--- a/data/src/test/java/org/apache/iceberg/data/parquet/TestGenericData.java
+++ b/data/src/test/java/org/apache/iceberg/data/parquet/TestGenericData.java
@@ -28,7 +28,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecordBuilder;
-import org.apache.hadoop.fs.Path;
import org.apache.iceberg.Files;
import org.apache.iceberg.Schema;
import org.apache.iceberg.avro.AvroSchemaUtil;
@@ -45,6 +44,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.io.LocalOutputFile;
import org.junit.jupiter.api.Test;
public class TestGenericData extends DataTestBase {
@@ -142,7 +142,8 @@ public class TestGenericData extends DataTestBase {
File testFile = temp.resolve("test-file" + System.nanoTime()).toFile();
ParquetWriter<org.apache.avro.generic.GenericRecord> writer =
- AvroParquetWriter.<org.apache.avro.generic.GenericRecord>builder(new Path(testFile.toURI()))
+ AvroParquetWriter.<org.apache.avro.generic.GenericRecord>builder(
+ new LocalOutputFile(testFile.toPath()))
.withDataModel(GenericData.get())
.withSchema(avroSchema)
.config("parquet.avro.add-list-element-records", "true")
diff --git a/data/src/test/java/org/apache/iceberg/data/parquet/TestParquetEncryptionWithWriteSupport.java b/data/src/test/java/org/apache/iceberg/data/parquet/TestParquetEncryptionWithWriteSupport.java
index 32d858d932..a100350660 100644
--- a/data/src/test/java/org/apache/iceberg/data/parquet/TestParquetEncryptionWithWriteSupport.java
+++ b/data/src/test/java/org/apache/iceberg/data/parquet/TestParquetEncryptionWithWriteSupport.java
@@ -31,7 +31,6 @@ import java.util.ArrayList;
import java.util.List;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecordBuilder;
-import org.apache.hadoop.fs.Path;
import org.apache.iceberg.Files;
import org.apache.iceberg.Schema;
import org.apache.iceberg.avro.AvroSchemaUtil;
@@ -50,6 +49,7 @@ import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.crypto.FileEncryptionProperties;
import org.apache.parquet.crypto.ParquetCryptoRuntimeException;
import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.io.LocalOutputFile;
import org.junit.jupiter.api.Test;
public class TestParquetEncryptionWithWriteSupport extends DataTestBase {
@@ -160,7 +160,8 @@ public class TestParquetEncryptionWithWriteSupport extends DataTestBase {
.build();
ParquetWriter<org.apache.avro.generic.GenericRecord> writer =
- AvroParquetWriter.<org.apache.avro.generic.GenericRecord>builder(new Path(testFile.toURI()))
+ AvroParquetWriter.<org.apache.avro.generic.GenericRecord>builder(
+ new LocalOutputFile(testFile.toPath()))
.withDataModel(GenericData.get())
.withSchema(avroSchema)
.withEncryption(fileEncryptionProperties)
diff --git a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/data/TestFlinkParquetReader.java b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/data/TestFlinkParquetReader.java
index 4e8c9f03f8..006c55d1b8 100644
--- a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/data/TestFlinkParquetReader.java
+++ b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/data/TestFlinkParquetReader.java
@@ -32,7 +32,6 @@ import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.types.logical.LogicalType;
-import org.apache.hadoop.fs.Path;
import org.apache.iceberg.Files;
import org.apache.iceberg.Schema;
import org.apache.iceberg.avro.AvroSchemaUtil;
@@ -52,6 +51,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.types.Types;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetWriter;
+import org.apache.parquet.io.LocalOutputFile;
import org.apache.parquet.schema.LogicalTypeAnnotation;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.PrimitiveType;
@@ -188,7 +188,7 @@ public class TestFlinkParquetReader extends DataTestBase {
assertThat(testFile.delete()).isTrue();
ParquetWriter<GenericRecord> writer =
- AvroParquetWriter.<GenericRecord>builder(new Path(testFile.toURI()))
+ AvroParquetWriter.<GenericRecord>builder(new LocalOutputFile(testFile.toPath()))
.withDataModel(GenericData.get())
.withSchema(avroSchema)
.config("parquet.avro.add-list-element-records", "true")
diff --git a/parquet/src/test/java/org/apache/iceberg/parquet/TestParquet.java b/parquet/src/test/java/org/apache/iceberg/parquet/TestParquet.java
index c33229ee8f..f38329e606 100644
--- a/parquet/src/test/java/org/apache/iceberg/parquet/TestParquet.java
+++ b/parquet/src/test/java/org/apache/iceberg/parquet/TestParquet.java
@@ -59,6 +59,7 @@ import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
+import org.apache.parquet.io.LocalOutputFile;
import org.apache.parquet.schema.MessageType;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
@@ -193,7 +194,7 @@ public class TestParquet {
File testFile = new File(temp.toFile(), "test" + System.nanoTime() + ".parquet");
ParquetWriter<GenericRecord> writer =
- AvroParquetWriter.<GenericRecord>builder(new org.apache.hadoop.fs.Path(testFile.toURI()))
+ AvroParquetWriter.<GenericRecord>builder(new LocalOutputFile(testFile.toPath()))
.withDataModel(GenericData.get())
.withSchema(avroSchema)
.config("parquet.avro.add-list-element-records", "true")