This is an automated email from the ASF dual-hosted git repository.
fokko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-java.git
The following commit(s) were added to refs/heads/master by this push:
new aa41aa121 Update to Java 11 (#3379)
aa41aa121 is described below
commit aa41aa121ff29e360273675edc899ba1c2640047
Author: Aaron Niskode-Dossett <[email protected]>
AuthorDate: Tue Jan 13 16:48:57 2026 -0600
Update to Java 11 (#3379)
---
.../IncrementallyUpdatedFilterPredicate.java | 4 +-
.../parquet/io/RecordReaderImplementation.java | 2 +-
.../parquet/schema/LogicalTypeAnnotation.java | 6 +-
.../column/statistics/TestSizeStatistics.java | 16 ++---
.../ByteStreamSplitValuesReaderTest.java | 2 +-
.../rle/TestRunLengthBitPackingHybridEncoder.java | 5 +-
.../filter2/recordlevel/TestValueInspector.java | 3 +-
.../column/columnindex/TestColumnIndexBuilder.java | 27 ++++---
.../parquet/io/ExpectationValidatingConverter.java | 3 +-
.../java/org/apache/parquet/io/TestColumnIO.java | 6 +-
.../parquet/schema/TestPrimitiveStringifier.java | 14 ++--
.../org/apache/parquet/SemanticVersionTest.java | 3 +-
.../TestConcatenatingByteBufferCollector.java | 4 +-
.../parquet/bytes/TestMultiBufferInputStream.java | 3 +-
.../java/org/apache/parquet/glob/TestGlob.java | 54 +++++++-------
.../format/converter/ParquetMetadataConverter.java | 4 +-
.../org/apache/parquet/hadoop/PrintFooter.java | 3 +-
.../mapred/DeprecatedParquetInputFormat.java | 3 +-
.../crypto/keytools/samples/VaultClient.java | 4 +-
.../apache/parquet/encodings/FileEncodingsIT.java | 5 +-
.../parquet/filter2/compat/TestRowGroupFilter.java | 23 +++---
.../dictionarylevel/DictionaryFilterTest.java | 5 +-
.../recordlevel/TestRecordLevelFilters.java | 13 ++--
.../statisticslevel/TestStatisticsFilter.java | 84 ++++++++++------------
.../converter/TestParquetMetadataConverter.java | 22 +++---
.../apache/parquet/hadoop/TestBloomFiltering.java | 3 +-
.../parquet/hadoop/TestColumnIndexFiltering.java | 3 +-
.../parquet/hadoop/TestDataPageChecksums.java | 19 +++--
.../org/apache/parquet/hadoop/TestInputFormat.java | 4 +-
.../hadoop/TestInputFormatColumnProjection.java | 3 +-
.../parquet/hadoop/TestMergeMetadataFiles.java | 10 ++-
.../parquet/hadoop/TestParquetFileWriter.java | 43 ++++++-----
.../apache/parquet/hadoop/TestParquetReader.java | 5 +-
.../hadoop/TestParquetReaderRandomAccess.java | 3 +-
.../apache/parquet/hadoop/TestParquetWriter.java | 4 +-
.../parquet/hadoop/TestParquetWriterNewPage.java | 4 +-
.../parquet/hadoop/TestStoreBloomFilter.java | 3 +-
.../hadoop/example/TestInputOutputFormat.java | 3 +-
.../hadoop/rewrite/ParquetRewriterTest.java | 12 ++--
.../parquet/hadoop/util/ColumnEncryptorTest.java | 21 +++---
.../parquet/hadoop/util/ColumnPrunerTest.java | 21 +++---
.../parquet/statistics/TestColumnIndexes.java | 2 +-
.../statistics/TestFloat16ReadWriteRoundTrip.java | 5 +-
.../parquet/statistics/TestFloat16Statistics.java | 5 +-
.../statistics/TestSizeStatisticsRoundTrip.java | 10 +--
.../apache/parquet/statistics/TestStatistics.java | 7 +-
46 files changed, 233 insertions(+), 275 deletions(-)
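Nearly every hunk below swaps Arrays.asList for the Java 9+ List.of factory, so it is worth noting that the two are not drop-in equivalents: List.of returns a truly immutable list and rejects null elements, while Arrays.asList returns a mutable fixed-size view that tolerates nulls. A minimal sketch of the differences the migration has to respect (illustrative only, not part of this commit):

import java.util.Arrays;
import java.util.List;

public class ListFactoryDemo {
  public static void main(String[] args) {
    // Arrays.asList: fixed-size but mutable view; null elements are allowed.
    List<Integer> legacy = Arrays.asList(1, 2, 3);
    legacy.set(0, 9);                                // allowed: writes into the backing array
    List<Integer> withNull = Arrays.asList(1, null); // allowed

    // List.of: truly immutable and null-hostile.
    List<Integer> modern = List.of(1, 2, 3);
    try {
      modern.set(0, 9);   // throws UnsupportedOperationException
    } catch (UnsupportedOperationException expected) { }
    try {
      List.of(1, null);   // throws NullPointerException
    } catch (NullPointerException expected) { }
  }
}

The replacement is only safe where a call site never stores nulls and never mutates the result, which the hunks below rely on.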
diff --git a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
index c2aab2b6b..7aea56b2a 100644
--- a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
+++ b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
@@ -18,7 +18,7 @@
*/
package org.apache.parquet.filter2.recordlevel;
-import java.util.Arrays;
+import java.util.List;
import java.util.Objects;
import org.apache.parquet.io.api.Binary;
@@ -153,7 +153,7 @@ public interface IncrementallyUpdatedFilterPredicate {
private final Iterable<ValueInspector> delegates;
DelegatingValueInspector(ValueInspector... delegates) {
- this.delegates = Arrays.asList(delegates);
+ this.delegates = List.of(delegates);
}
/**
diff --git a/parquet-column/src/main/java/org/apache/parquet/io/RecordReaderImplementation.java b/parquet-column/src/main/java/org/apache/parquet/io/RecordReaderImplementation.java
index ac2f74be6..46c4b714a 100644
--- a/parquet-column/src/main/java/org/apache/parquet/io/RecordReaderImplementation.java
+++ b/parquet-column/src/main/java/org/apache/parquet/io/RecordReaderImplementation.java
@@ -488,6 +488,6 @@ class RecordReaderImplementation<T> extends RecordReader<T> {
protected Iterable<ColumnReader> getColumnReaders() {
// Converting the array to an iterable ensures that the array cannot be altered
- return Arrays.asList(columnReaders);
+ return List.of(columnReaders);
}
}
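One subtlety in the hunk above: Arrays.asList(columnReaders) was a fixed-size view backed by the array, so callers could still overwrite elements through set(); the comment promised slightly more than it delivered. List.of(columnReaders) copies the elements into a genuinely immutable list. A small sketch of view-versus-copy semantics (the String array is a hypothetical stand-in, not code from the repository):

import java.util.Arrays;
import java.util.List;

public class ViewVsCopyDemo {
  public static void main(String[] args) {
    String[] readers = {"a", "b"};

    List<String> view = Arrays.asList(readers);
    view.set(0, "mutated");          // writes through to the array
    System.out.println(readers[0]);  // prints "mutated"

    readers[0] = "a";
    List<String> copy = List.of(readers);
    readers[0] = "changed later";    // the copy is unaffected
    System.out.println(copy.get(0)); // still prints "a"
  }
}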
diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/LogicalTypeAnnotation.java b/parquet-column/src/main/java/org/apache/parquet/schema/LogicalTypeAnnotation.java
index be98e071f..98bc5c023 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/LogicalTypeAnnotation.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/LogicalTypeAnnotation.java
@@ -18,7 +18,6 @@
*/
package org.apache.parquet.schema;
-import static java.util.Arrays.asList;
import static java.util.Optional.empty;
import static org.apache.parquet.schema.ColumnOrder.ColumnOrderName.TYPE_DEFINED_ORDER;
import static org.apache.parquet.schema.ColumnOrder.ColumnOrderName.UNDEFINED;
@@ -33,8 +32,6 @@ import static org.apache.parquet.schema.PrimitiveStringifier.TIME_NANOS_UTC_STRI
import static org.apache.parquet.schema.PrimitiveStringifier.TIME_STRINGIFIER;
import static org.apache.parquet.schema.PrimitiveStringifier.TIME_UTC_STRINGIFIER;
-import java.util.Collections;
-import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
@@ -822,8 +819,7 @@ public abstract class LogicalTypeAnnotation {
}
public static class IntLogicalTypeAnnotation extends LogicalTypeAnnotation {
- private static final Set<Integer> VALID_BIT_WIDTH =
- Collections.unmodifiableSet(new HashSet<>(asList(8, 16, 32, 64)));
+ private static final Set<Integer> VALID_BIT_WIDTH = Set.of(8, 16, 32, 64);
private final int bitWidth;
private final boolean isSigned;
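Set.of is likewise stricter than the Collections.unmodifiableSet(new HashSet<>(...)) idiom it replaces here: duplicates fail with IllegalArgumentException and nulls with NullPointerException, both at class-initialization time. That is harmless for a fixed literal set such as the valid bit widths; a short sketch of the edge cases (assumed values, for illustration only):

import java.util.Set;

public class SetFactoryDemo {
  public static void main(String[] args) {
    Set<Integer> widths = Set.of(8, 16, 32, 64);
    System.out.println(widths.contains(32)); // true; membership checks behave as before

    try {
      Set.of(8, 8);                // throws IllegalArgumentException: duplicate element
    } catch (IllegalArgumentException expected) { }
    try {
      Set.of(8, (Integer) null);   // throws NullPointerException
    } catch (NullPointerException expected) { }
  }
}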
diff --git a/parquet-column/src/test/java/org/apache/parquet/column/statistics/TestSizeStatistics.java b/parquet-column/src/test/java/org/apache/parquet/column/statistics/TestSizeStatistics.java
index 813298c2b..786d2be2c 100644
--- a/parquet-column/src/test/java/org/apache/parquet/column/statistics/TestSizeStatistics.java
+++ b/parquet-column/src/test/java/org/apache/parquet/column/statistics/TestSizeStatistics.java
@@ -18,8 +18,8 @@
*/
package org.apache.parquet.column.statistics;
-import java.util.Arrays;
import java.util.Collections;
+import java.util.List;
import java.util.Optional;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.LogicalTypeAnnotation;
@@ -47,8 +47,8 @@ public class TestSizeStatistics {
builder.add(1, 1);
SizeStatistics statistics = builder.build();
Assert.assertEquals(Optional.of(3L), statistics.getUnencodedByteArrayDataBytes());
- Assert.assertEquals(Arrays.asList(3L, 3L, 1L), statistics.getRepetitionLevelHistogram());
- Assert.assertEquals(Arrays.asList(2L, 2L, 3L), statistics.getDefinitionLevelHistogram());
+ Assert.assertEquals(List.of(3L, 3L, 1L), statistics.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(2L, 2L, 3L), statistics.getDefinitionLevelHistogram());
}
@Test
@@ -67,7 +67,7 @@ public class TestSizeStatistics {
builder.add(1, 0);
SizeStatistics statistics = builder.build();
Assert.assertEquals(Optional.empty(), statistics.getUnencodedByteArrayDataBytes());
- Assert.assertEquals(Arrays.asList(2L, 4L), statistics.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(2L, 4L), statistics.getRepetitionLevelHistogram());
Assert.assertEquals(Collections.emptyList(), statistics.getDefinitionLevelHistogram());
}
@@ -89,8 +89,8 @@ public class TestSizeStatistics {
SizeStatistics statistics2 = builder2.build();
statistics1.mergeStatistics(statistics2);
Assert.assertEquals(Optional.of(5L), statistics1.getUnencodedByteArrayDataBytes());
- Assert.assertEquals(Arrays.asList(3L, 1L, 1L), statistics1.getRepetitionLevelHistogram());
- Assert.assertEquals(Arrays.asList(1L, 3L, 1L), statistics1.getDefinitionLevelHistogram());
+ Assert.assertEquals(List.of(3L, 1L, 1L), statistics1.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(1L, 3L, 1L), statistics1.getDefinitionLevelHistogram());
}
@Test
@@ -122,8 +122,8 @@ public class TestSizeStatistics {
SizeStatistics statistics = builder.build();
SizeStatistics copy = statistics.copy();
Assert.assertEquals(Optional.of(3L), copy.getUnencodedByteArrayDataBytes());
- Assert.assertEquals(Arrays.asList(1L, 1L, 1L), copy.getRepetitionLevelHistogram());
- Assert.assertEquals(Arrays.asList(1L, 1L, 1L), copy.getDefinitionLevelHistogram());
+ Assert.assertEquals(List.of(1L, 1L, 1L), copy.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(1L, 1L, 1L), copy.getDefinitionLevelHistogram());
}
@Test
diff --git a/parquet-column/src/test/java/org/apache/parquet/column/values/bytestreamsplit/ByteStreamSplitValuesReaderTest.java b/parquet-column/src/test/java/org/apache/parquet/column/values/bytestreamsplit/ByteStreamSplitValuesReaderTest.java
index 348d24559..dc37ed67c 100644
--- a/parquet-column/src/test/java/org/apache/parquet/column/values/bytestreamsplit/ByteStreamSplitValuesReaderTest.java
+++ b/parquet-column/src/test/java/org/apache/parquet/column/values/bytestreamsplit/ByteStreamSplitValuesReaderTest.java
@@ -37,7 +37,7 @@ public class ByteStreamSplitValuesReaderTest {
throws Exception {
ByteBuffer buffer = ByteBuffer.wrap(input);
ByteBufferInputStream stream = ByteBufferInputStream.wrap(buffer);
- Reader reader = cls.newInstance();
+ Reader reader = cls.getDeclaredConstructor().newInstance();
reader.initFromPage(length, stream);
return reader;
}
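The change above is another Java 11 cleanup in this commit: Class.newInstance() has been deprecated since Java 9 because it rethrows whatever the constructor throws, including checked exceptions, without wrapping, bypassing compile-time exception checking. getDeclaredConstructor().newInstance() instead surfaces constructor failures as InvocationTargetException. A hedged sketch with a hypothetical DemoReader class standing in for the test's Reader implementations:

import java.lang.reflect.InvocationTargetException;

public class ReflectiveConstructionDemo {
  // Hypothetical stand-in for the Reader classes used by the test.
  public static class DemoReader {
    public DemoReader() { }
  }

  public static void main(String[] args) throws Exception {
    // Deprecated since Java 9; constructor exceptions would propagate unwrapped:
    //   DemoReader r = DemoReader.class.newInstance();

    // Java 9+ idiom: failures arrive wrapped in InvocationTargetException.
    try {
      DemoReader r = DemoReader.class.getDeclaredConstructor().newInstance();
      System.out.println("constructed: " + r);
    } catch (InvocationTargetException e) {
      System.err.println("constructor threw: " + e.getCause());
    }
  }
}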
diff --git a/parquet-column/src/test/java/org/apache/parquet/column/values/rle/TestRunLengthBitPackingHybridEncoder.java b/parquet-column/src/test/java/org/apache/parquet/column/values/rle/TestRunLengthBitPackingHybridEncoder.java
index 7ec5a9568..93a6c8deb 100644
--- a/parquet-column/src/test/java/org/apache/parquet/column/values/rle/TestRunLengthBitPackingHybridEncoder.java
+++ b/parquet-column/src/test/java/org/apache/parquet/column/values/rle/TestRunLengthBitPackingHybridEncoder.java
@@ -22,7 +22,6 @@ import static org.junit.Assert.assertEquals;
import java.io.ByteArrayInputStream;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import org.apache.parquet.bytes.BytesUtils;
import org.apache.parquet.bytes.DirectByteBufferAllocator;
@@ -187,7 +186,7 @@ public class TestRunLengthBitPackingHybridEncoder {
assertEquals(3, BytesUtils.readUnsignedVarInt(is));
List<Integer> values = unpack(3, 8, is);
- assertEquals(Arrays.asList(0, 1, 0, 1, 0, 2, 2, 2), values);
+ assertEquals(List.of(0, 1, 0, 1, 0, 2, 2, 2), values);
// header = 100 << 1 = 200
assertEquals(200, BytesUtils.readUnsignedVarInt(is));
@@ -212,7 +211,7 @@ public class TestRunLengthBitPackingHybridEncoder {
List<Integer> values = unpack(5, 16, is);
- assertEquals(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0), values);
+ assertEquals(List.of(1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0), values);
assertEquals(-1, is.read());
}
diff --git a/parquet-column/src/test/java/org/apache/parquet/filter2/recordlevel/TestValueInspector.java b/parquet-column/src/test/java/org/apache/parquet/filter2/recordlevel/TestValueInspector.java
index a164e9666..e0c55c276 100644
--- a/parquet-column/src/test/java/org/apache/parquet/filter2/recordlevel/TestValueInspector.java
+++ b/parquet-column/src/test/java/org/apache/parquet/filter2/recordlevel/TestValueInspector.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import java.util.Arrays;
import java.util.List;
import org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
import org.junit.Test;
@@ -83,7 +82,7 @@ public class TestValueInspector {
@Test
public void testReusable() {
- List<Integer> values = Arrays.asList(2, 4, 7, 3, 8, 8, 11, 200);
+ List<Integer> values = List.of(2, 4, 7, 3, 8, 8, 11, 200);
ValueInspector v = intIsEven();
for (Integer x : values) {
diff --git a/parquet-column/src/test/java/org/apache/parquet/internal/column/columnindex/TestColumnIndexBuilder.java b/parquet-column/src/test/java/org/apache/parquet/internal/column/columnindex/TestColumnIndexBuilder.java
index 58a899eef..6274f3626 100644
--- a/parquet-column/src/test/java/org/apache/parquet/internal/column/columnindex/TestColumnIndexBuilder.java
+++ b/parquet-column/src/test/java/org/apache/parquet/internal/column/columnindex/TestColumnIndexBuilder.java
@@ -18,7 +18,6 @@
*/
package org.apache.parquet.internal.column.columnindex;
-import static java.util.Arrays.asList;
import static org.apache.parquet.filter2.predicate.FilterApi.and;
import static org.apache.parquet.filter2.predicate.FilterApi.binaryColumn;
import static org.apache.parquet.filter2.predicate.FilterApi.booleanColumn;
@@ -688,8 +687,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(BINARY).as(UTF8).named("test_binary_utf8"),
BoundaryOrder.ASCENDING,
- asList(true, true, false, false, true, false, true, false),
- asList(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l),
+ List.of(true, true, false, false, true, false, true, false),
+ List.of(1l, 2l, 3l, 4l, 5l, 6l, 7l, 8l),
toBBList(
null,
null,
@@ -738,7 +737,7 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(BINARY).as(UTF8).named("test_binary_utf8"),
BoundaryOrder.ASCENDING,
- asList(true, true, false, false, true, false, true, false),
+ List.of(true, true, false, false, true, false, true, false),
null,
toBBList(
null,
@@ -904,8 +903,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(BOOLEAN).named("test_boolean"),
BoundaryOrder.DESCENDING,
- asList(false, true, false, true, false, true),
- asList(9l, 8l, 7l, 6l, 5l, 0l),
+ List.of(false, true, false, true, false, true),
+ List.of(9l, 8l, 7l, 6l, 5l, 0l),
toBBList(false, null, false, null, true, null),
toBBList(true, null, false, null, true, null));
assertEquals(BoundaryOrder.DESCENDING, columnIndex.getBoundaryOrder());
@@ -1058,8 +1057,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(DOUBLE).named("test_double"),
BoundaryOrder.UNORDERED,
- asList(false, false, false, false, false, false),
- asList(0l, 1l, 2l, 3l, 4l, 5l),
+ List.of(false, false, false, false, false, false),
+ List.of(0l, 1l, 2l, 3l, 4l, 5l),
toBBList(-1.0, -2.0, -3.0, -4.0, -5.0, -6.0),
toBBList(1.0, 2.0, 3.0, 4.0, 5.0, 6.0));
assertEquals(BoundaryOrder.UNORDERED, columnIndex.getBoundaryOrder());
@@ -1211,8 +1210,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(FLOAT).named("test_float"),
BoundaryOrder.ASCENDING,
- asList(true, true, true, false, false, false),
- asList(9l, 8l, 7l, 6l, 0l, 0l),
+ List.of(true, true, true, false, false, false),
+ List.of(9l, 8l, 7l, 6l, 0l, 0l),
toBBList(null, null, null, -3.0f, -2.0f, 0.1f),
toBBList(null, null, null, -2.0f, 0.0f, 6.0f));
assertEquals(BoundaryOrder.ASCENDING, columnIndex.getBoundaryOrder());
@@ -1345,8 +1344,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(INT32).named("test_int32"),
BoundaryOrder.DESCENDING,
- asList(false, false, false, true, true, true),
- asList(0l, 10l, 0l, 3l, 5l, 7l),
+ List.of(false, false, false, true, true, true),
+ List.of(0l, 10l, 0l, 3l, 5l, 7l),
toBBList(10, 8, 6, null, null, null),
toBBList(9, 7, 5, null, null, null));
assertEquals(BoundaryOrder.DESCENDING, columnIndex.getBoundaryOrder());
@@ -1597,8 +1596,8 @@ public class TestColumnIndexBuilder {
ColumnIndex columnIndex = ColumnIndexBuilder.build(
Types.required(INT64).named("test_int64"),
BoundaryOrder.UNORDERED,
- asList(true, false, true, false, true, false),
- asList(1l, 2l, 3l, 4l, 5l, 6l),
+ List.of(true, false, true, false, true, false),
+ List.of(1l, 2l, 3l, 4l, 5l, 6l),
toBBList(null, 2l, null, 4l, null, 9l),
toBBList(null, 3l, null, 15l, null, 10l));
assertEquals(BoundaryOrder.UNORDERED, columnIndex.getBoundaryOrder());
diff --git a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
index cf8e2edc0..99073c98e 100644
--- a/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
+++ b/parquet-column/src/test/java/org/apache/parquet/io/ExpectationValidatingConverter.java
@@ -21,7 +21,6 @@ package org.apache.parquet.io;
import static org.junit.Assert.assertEquals;
import java.util.ArrayDeque;
-import java.util.Arrays;
import java.util.Deque;
import java.util.List;
import org.apache.parquet.io.api.Binary;
@@ -48,7 +47,7 @@ public class ExpectationValidatingConverter extends RecordMaterializer<Void> {
}
public ExpectationValidatingConverter(String[] expectations, MessageType schema) {
- this(new ArrayDeque<>(Arrays.asList(expectations)), schema);
+ this(new ArrayDeque<>(List.of(expectations)), schema);
}
public ExpectationValidatingConverter(Deque<String> expectations, MessageType schema) {
diff --git a/parquet-column/src/test/java/org/apache/parquet/io/TestColumnIO.java b/parquet-column/src/test/java/org/apache/parquet/io/TestColumnIO.java
index ed1e232a8..fa4fab710 100644
--- a/parquet-column/src/test/java/org/apache/parquet/io/TestColumnIO.java
+++ b/parquet-column/src/test/java/org/apache/parquet/io/TestColumnIO.java
@@ -144,7 +144,7 @@ public class TestColumnIO {
@Parameterized.Parameters
public static Collection<Object[]> data() throws IOException {
Object[][] data = {{true}, {false}};
- return Arrays.asList(data);
+ return List.of(data);
}
private boolean useDictionary;
@@ -386,7 +386,7 @@ public class TestColumnIO {
.append("g", new NanoTime(1234, System.currentTimeMillis() * 1000))
.append("h", Binary.fromString("abc"));
- testSchema(oneOfEachSchema, Arrays.asList(g1));
+ testSchema(oneOfEachSchema, List.of(g1));
}
@Test
@@ -398,7 +398,7 @@ public class TestColumnIO {
Group g1 = gf.newGroup();
g1.addGroup("foo").append("bar", 2l);
- testSchema(reqreqSchema, Arrays.asList(g1));
+ testSchema(reqreqSchema, List.of(g1));
}
@Test
diff --git a/parquet-column/src/test/java/org/apache/parquet/schema/TestPrimitiveStringifier.java b/parquet-column/src/test/java/org/apache/parquet/schema/TestPrimitiveStringifier.java
index 3101ecea0..b165b200d 100644
--- a/parquet-column/src/test/java/org/apache/parquet/schema/TestPrimitiveStringifier.java
+++ b/parquet-column/src/test/java/org/apache/parquet/schema/TestPrimitiveStringifier.java
@@ -19,7 +19,6 @@
package org.apache.parquet.schema;
import static java.nio.charset.StandardCharsets.UTF_8;
-import static java.util.Arrays.asList;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
@@ -48,6 +47,7 @@ import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
@@ -175,7 +175,7 @@ public class TestPrimitiveStringifier {
@Test
public void testTimestampMillisStringifier() {
for (PrimitiveStringifier stringifier :
- asList(TIMESTAMP_MILLIS_STRINGIFIER, TIMESTAMP_MILLIS_UTC_STRINGIFIER)) {
+ List.of(TIMESTAMP_MILLIS_STRINGIFIER, TIMESTAMP_MILLIS_UTC_STRINGIFIER)) {
String timezoneAmendment = (stringifier == TIMESTAMP_MILLIS_STRINGIFIER ? "" : "+0000");
assertEquals(withZoneString("1970-01-01T00:00:00.000",
timezoneAmendment), stringifier.stringify(0l));
@@ -202,7 +202,7 @@ public class TestPrimitiveStringifier {
@Test
public void testTimestampMicrosStringifier() {
for (PrimitiveStringifier stringifier :
- asList(TIMESTAMP_MICROS_STRINGIFIER, TIMESTAMP_MICROS_UTC_STRINGIFIER)) {
+ List.of(TIMESTAMP_MICROS_STRINGIFIER, TIMESTAMP_MICROS_UTC_STRINGIFIER)) {
String timezoneAmendment = (stringifier == TIMESTAMP_MICROS_STRINGIFIER ? "" : "+0000");
assertEquals(withZoneString("1970-01-01T00:00:00.000000",
timezoneAmendment), stringifier.stringify(0l));
@@ -228,7 +228,7 @@ public class TestPrimitiveStringifier {
@Test
public void testTimestampNanosStringifier() {
- for (PrimitiveStringifier stringifier : asList(TIMESTAMP_NANOS_STRINGIFIER, TIMESTAMP_NANOS_UTC_STRINGIFIER)) {
+ for (PrimitiveStringifier stringifier : List.of(TIMESTAMP_NANOS_STRINGIFIER, TIMESTAMP_NANOS_UTC_STRINGIFIER)) {
String timezoneAmendment = (stringifier == TIMESTAMP_NANOS_STRINGIFIER ? "" : "+0000");
assertEquals(withZoneString("1970-01-01T00:00:00.000000000",
timezoneAmendment), stringifier.stringify(0l));
@@ -254,7 +254,7 @@ public class TestPrimitiveStringifier {
@Test
public void testTimeStringifier() {
- for (PrimitiveStringifier stringifier : asList(TIME_STRINGIFIER, TIME_UTC_STRINGIFIER)) {
+ for (PrimitiveStringifier stringifier : List.of(TIME_STRINGIFIER, TIME_UTC_STRINGIFIER)) {
String timezoneAmendment = (stringifier == TIME_STRINGIFIER ? "" :
"+0000");
assertEquals(withZoneString("00:00:00.000", timezoneAmendment),
stringifier.stringify(0));
@@ -290,7 +290,7 @@ public class TestPrimitiveStringifier {
@Test
public void testTimeNanoStringifier() {
- for (PrimitiveStringifier stringifier : asList(TIME_NANOS_STRINGIFIER, TIME_NANOS_UTC_STRINGIFIER)) {
+ for (PrimitiveStringifier stringifier : List.of(TIME_NANOS_STRINGIFIER, TIME_NANOS_UTC_STRINGIFIER)) {
String timezoneAmendment = (stringifier == TIME_NANOS_STRINGIFIER ? "" :
"+0000");
assertEquals(withZoneString("00:00:00.000000000", timezoneAmendment),
stringifier.stringify(0l));
@@ -434,7 +434,7 @@ public class TestPrimitiveStringifier {
}
private void checkThrowingUnsupportedException(PrimitiveStringifier stringifier, Class<?>... excludes) {
- Set<Class<?>> set = new HashSet<>(asList(excludes));
+ Set<Class<?>> set = new HashSet<>(List.of(excludes));
if (!set.contains(Integer.TYPE)) {
try {
stringifier.stringify(0);
diff --git a/parquet-common/src/test/java/org/apache/parquet/SemanticVersionTest.java b/parquet-common/src/test/java/org/apache/parquet/SemanticVersionTest.java
index 602ea374c..e29352d40 100644
--- a/parquet-common/src/test/java/org/apache/parquet/SemanticVersionTest.java
+++ b/parquet-common/src/test/java/org/apache/parquet/SemanticVersionTest.java
@@ -21,7 +21,6 @@ package org.apache.parquet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
-import java.util.Arrays;
import java.util.List;
import org.junit.Test;
@@ -51,7 +50,7 @@ public class SemanticVersionTest {
@Test
public void testSemverPrereleaseExamples() throws Exception {
- List<String> examples = Arrays.asList(
+ List<String> examples = List.of(
"1.0.0-alpha",
"1.0.0-alpha.1",
"1.0.0-alpha.beta",
diff --git a/parquet-common/src/test/java/org/apache/parquet/bytes/TestConcatenatingByteBufferCollector.java b/parquet-common/src/test/java/org/apache/parquet/bytes/TestConcatenatingByteBufferCollector.java
index 8b3a9caba..d973a7c96 100644
--- a/parquet-common/src/test/java/org/apache/parquet/bytes/TestConcatenatingByteBufferCollector.java
+++ b/parquet-common/src/test/java/org/apache/parquet/bytes/TestConcatenatingByteBufferCollector.java
@@ -24,7 +24,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
+import java.util.List;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -54,7 +54,7 @@ public class TestConcatenatingByteBufferCollector {
ConcatenatingByteBufferCollector inner = new ConcatenatingByteBufferCollector(allocator)) {
outer.collect(BytesInput.concat(
BytesInput.from(byteBuffer("This"), byteBuffer(" "),
byteBuffer("is")),
- BytesInput.from(Arrays.asList(byteBuffer(" a"), byteBuffer(" "), byteBuffer("test"))),
+ BytesInput.from(List.of(byteBuffer(" a"), byteBuffer(" "), byteBuffer("test"))),
BytesInput.from(inputStream(" text to blabla"), 8),
BytesInput.from(bytes(" ")),
BytesInput.from(bytes("blabla validate blabla"), 7, 9),
diff --git a/parquet-common/src/test/java/org/apache/parquet/bytes/TestMultiBufferInputStream.java b/parquet-common/src/test/java/org/apache/parquet/bytes/TestMultiBufferInputStream.java
index a6ddfe794..2c1b1ff52 100644
--- a/parquet-common/src/test/java/org/apache/parquet/bytes/TestMultiBufferInputStream.java
+++ b/parquet-common/src/test/java/org/apache/parquet/bytes/TestMultiBufferInputStream.java
@@ -21,13 +21,12 @@ package org.apache.parquet.bytes;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
public class TestMultiBufferInputStream extends TestByteBufferInputStreams {
- private static final List<ByteBuffer> DATA = Arrays.asList(
+ private static final List<ByteBuffer> DATA = List.of(
ByteBuffer.wrap(new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8}),
ByteBuffer.wrap(new byte[] {9, 10, 11, 12}),
ByteBuffer.wrap(new byte[] {}),
diff --git a/parquet-common/src/test/java/org/apache/parquet/glob/TestGlob.java b/parquet-common/src/test/java/org/apache/parquet/glob/TestGlob.java
index 34bf7f418..433cc360f 100644
--- a/parquet-common/src/test/java/org/apache/parquet/glob/TestGlob.java
+++ b/parquet-common/src/test/java/org/apache/parquet/glob/TestGlob.java
@@ -21,7 +21,7 @@ package org.apache.parquet.glob;
import static junit.framework.Assert.fail;
import static org.junit.Assert.assertEquals;
-import java.util.Arrays;
+import java.util.List;
import junit.framework.Assert;
import org.apache.parquet.Strings;
import org.apache.parquet.glob.GlobParser.GlobParseException;
@@ -31,28 +31,28 @@ public class TestGlob {
@Test
public void testNoGlobs() {
- assertEquals(Arrays.asList("foo"), Strings.expandGlob("foo"));
+ assertEquals(List.of("foo"), Strings.expandGlob("foo"));
}
@Test
public void testEmptyGroup() {
- assertEquals(Arrays.asList(""), Strings.expandGlob(""));
- assertEquals(Arrays.asList(""), Strings.expandGlob("{}"));
- assertEquals(Arrays.asList("a"), Strings.expandGlob("a{}"));
- assertEquals(Arrays.asList("ab"), Strings.expandGlob("a{}b"));
- assertEquals(Arrays.asList("a"), Strings.expandGlob("{}a"));
- assertEquals(Arrays.asList("a"), Strings.expandGlob("a{}"));
- assertEquals(Arrays.asList("", ""), Strings.expandGlob("{,}"));
- assertEquals(Arrays.asList("ab", "a", "ac"),
Strings.expandGlob("a{b,{},c}"));
+ assertEquals(List.of(""), Strings.expandGlob(""));
+ assertEquals(List.of(""), Strings.expandGlob("{}"));
+ assertEquals(List.of("a"), Strings.expandGlob("a{}"));
+ assertEquals(List.of("ab"), Strings.expandGlob("a{}b"));
+ assertEquals(List.of("a"), Strings.expandGlob("{}a"));
+ assertEquals(List.of("a"), Strings.expandGlob("a{}"));
+ assertEquals(List.of("", ""), Strings.expandGlob("{,}"));
+ assertEquals(List.of("ab", "a", "ac"), Strings.expandGlob("a{b,{},c}"));
}
@Test
public void testSingleLevel() {
- assertEquals(Arrays.asList("foobar", "foobaz"),
Strings.expandGlob("foo{bar,baz}"));
- assertEquals(Arrays.asList("startfooend", "startbarend"),
Strings.expandGlob("start{foo,bar}end"));
- assertEquals(Arrays.asList("fooend", "barend"),
Strings.expandGlob("{foo,bar}end"));
+ assertEquals(List.of("foobar", "foobaz"),
Strings.expandGlob("foo{bar,baz}"));
+ assertEquals(List.of("startfooend", "startbarend"),
Strings.expandGlob("start{foo,bar}end"));
+ assertEquals(List.of("fooend", "barend"),
Strings.expandGlob("{foo,bar}end"));
assertEquals(
- Arrays.asList(
+ List.of(
"startfooenda",
"startfooendb",
"startfooendc",
@@ -62,14 +62,14 @@ public class TestGlob {
"startbarendc",
"startbarendd"),
Strings.expandGlob("start{foo,bar}end{a,b,c,d}"));
- assertEquals(Arrays.asList("xa", "xb", "xc", "ya", "yb", "yc"),
Strings.expandGlob("{x,y}{a,b,c}"));
- assertEquals(Arrays.asList("x", "y", "z"), Strings.expandGlob("{x,y,z}"));
+ assertEquals(List.of("xa", "xb", "xc", "ya", "yb", "yc"),
Strings.expandGlob("{x,y}{a,b,c}"));
+ assertEquals(List.of("x", "y", "z"), Strings.expandGlob("{x,y,z}"));
}
@Test
public void testNested() {
assertEquals(
- Arrays.asList(
+ List.of(
"startoneend",
"startpretwopostend",
"startprethreepostend",
@@ -84,9 +84,9 @@ public class TestGlob {
@Test
public void testExtraBraces() {
- assertEquals(Arrays.asList("x", "y", "z"),
Strings.expandGlob("{{x,y,z}}"));
- assertEquals(Arrays.asList("x", "y", "z"),
Strings.expandGlob("{{{x,y,z}}}"));
- assertEquals(Arrays.asList("startx", "starta", "startb", "starty"),
Strings.expandGlob("start{x,{a,b},y}"));
+ assertEquals(List.of("x", "y", "z"), Strings.expandGlob("{{x,y,z}}"));
+ assertEquals(List.of("x", "y", "z"), Strings.expandGlob("{{{x,y,z}}}"));
+ assertEquals(List.of("startx", "starta", "startb", "starty"),
Strings.expandGlob("start{x,{a,b},y}"));
}
@Test
@@ -102,17 +102,17 @@ public class TestGlob {
@Test
public void testCommaCornerCases() {
// single empty string in each location
- assertEquals(Arrays.asList("foobar", "foo", "foobaz"),
Strings.expandGlob("foo{bar,,baz}"));
- assertEquals(Arrays.asList("foo", "foobar", "foobaz"),
Strings.expandGlob("foo{,bar,baz}"));
- assertEquals(Arrays.asList("foobar", "foobaz", "foo"),
Strings.expandGlob("foo{bar,baz,}"));
+ assertEquals(List.of("foobar", "foo", "foobaz"),
Strings.expandGlob("foo{bar,,baz}"));
+ assertEquals(List.of("foo", "foobar", "foobaz"),
Strings.expandGlob("foo{,bar,baz}"));
+ assertEquals(List.of("foobar", "foobaz", "foo"),
Strings.expandGlob("foo{bar,baz,}"));
// multiple empty strings
- assertEquals(Arrays.asList("foobar", "foo", "foo", "foobaz"),
Strings.expandGlob("foo{bar,,,baz}"));
- assertEquals(Arrays.asList("foo", "foo", "foobar", "foobaz"),
Strings.expandGlob("foo{,,bar,baz}"));
- assertEquals(Arrays.asList("foobar", "foobaz", "foo", "foo"),
Strings.expandGlob("foo{bar,baz,,}"));
+ assertEquals(List.of("foobar", "foo", "foo", "foobaz"),
Strings.expandGlob("foo{bar,,,baz}"));
+ assertEquals(List.of("foo", "foo", "foobar", "foobaz"),
Strings.expandGlob("foo{,,bar,baz}"));
+ assertEquals(List.of("foobar", "foobaz", "foo", "foo"),
Strings.expandGlob("foo{bar,baz,,}"));
// between groups
- assertEquals(Arrays.asList("x", "y", "", "a", "b"),
Strings.expandGlob("{{x,y},,{a,b}}"));
+ assertEquals(List.of("x", "y", "", "a", "b"),
Strings.expandGlob("{{x,y},,{a,b}}"));
}
private void assertNotEnoughCloseBraces(String s) {
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java b/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
index 002028cdf..60150439a 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/format/converter/ParquetMetadataConverter.java
@@ -1052,12 +1052,12 @@ public class ParquetMetadataConverter {
UNKNOWN
}
- private static final Set<Class> STRING_TYPES =
Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+ private static final Set<Class> STRING_TYPES = Set.of(
LogicalTypeAnnotation.StringLogicalTypeAnnotation.class,
LogicalTypeAnnotation.EnumLogicalTypeAnnotation.class,
LogicalTypeAnnotation.JsonLogicalTypeAnnotation.class,
LogicalTypeAnnotation.Float16LogicalTypeAnnotation.class,
- LogicalTypeAnnotation.UnknownLogicalTypeAnnotation.class)));
+ LogicalTypeAnnotation.UnknownLogicalTypeAnnotation.class);
/**
* Returns whether to use signed order min and max with a type. It is safe to
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/PrintFooter.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/PrintFooter.java
index 64153893d..d50bccd03 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/PrintFooter.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/PrintFooter.java
@@ -23,7 +23,6 @@ import static org.apache.parquet.hadoop.ParquetFileWriter.PARQUET_METADATA_FILE;
import java.net.URI;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Deque;
import java.util.LinkedHashMap;
@@ -77,7 +76,7 @@ public class PrintFooter {
List<FileStatus> statuses;
if (fileStatus.isDir()) {
System.out.println("listing files in " + fileStatus.getPath());
- statuses = Arrays.asList(fs.listStatus(fileStatus.getPath(), HiddenFileFilter.INSTANCE));
+ statuses = List.of(fs.listStatus(fileStatus.getPath(), HiddenFileFilter.INSTANCE));
} else {
statuses = new ArrayList<FileStatus>();
statuses.add(fileStatus);
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/mapred/DeprecatedParquetInputFormat.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/mapred/DeprecatedParquetInputFormat.java
index f10c574c4..f3e822528 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/mapred/DeprecatedParquetInputFormat.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/mapred/DeprecatedParquetInputFormat.java
@@ -19,7 +19,6 @@
package org.apache.parquet.hadoop.mapred;
import static java.lang.Boolean.TRUE;
-import static java.util.Arrays.asList;
import java.io.DataInput;
import java.io.DataOutput;
@@ -65,7 +64,7 @@ public class DeprecatedParquetInputFormat<V> extends org.apache.hadoop.mapred.Fi
}
public List<Footer> getFooters(JobConf job) throws IOException {
- return realInputFormat.getFooters(job, asList(super.listStatus(job)));
+ return realInputFormat.getFooters(job, List.of(super.listStatus(job)));
}
private static class RecordReaderWrapper<V> implements RecordReader<Void, Container<V>> {
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/crypto/keytools/samples/VaultClient.java b/parquet-hadoop/src/test/java/org/apache/parquet/crypto/keytools/samples/VaultClient.java
index 1c8b2e1f9..fcaac1176 100755
--- a/parquet-hadoop/src/test/java/org/apache/parquet/crypto/keytools/samples/VaultClient.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/crypto/keytools/samples/VaultClient.java
@@ -20,9 +20,9 @@ package org.apache.parquet.crypto.keytools.samples;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Base64;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import okhttp3.ConnectionSpec;
import okhttp3.MediaType;
@@ -56,7 +56,7 @@ public class VaultClient implements KmsClient {
private String endPointPrefix;
private OkHttpClient httpClient = new OkHttpClient.Builder()
- .connectionSpecs(Arrays.asList(ConnectionSpec.MODERN_TLS, ConnectionSpec.COMPATIBLE_TLS))
+ .connectionSpecs(List.of(ConnectionSpec.MODERN_TLS, ConnectionSpec.COMPATIBLE_TLS))
.build();
@Override
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java b/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
index f2e6e16fc..c35b13f8f 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/encodings/FileEncodingsIT.java
@@ -23,7 +23,6 @@ import static junit.framework.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
@@ -105,7 +104,7 @@ public class FileEncodingsIT {
@Parameterized.Parameters
public static Collection<Object[]> getParameters() {
- List<PrimitiveTypeName> types = Arrays.asList(
+ List<PrimitiveTypeName> types = List.of(
PrimitiveTypeName.BOOLEAN,
PrimitiveTypeName.INT32,
PrimitiveTypeName.INT64,
@@ -124,7 +123,7 @@ public class FileEncodingsIT {
}
} else {
// otherwise test just UNCOMPRESSED
- codecs = Arrays.asList(CompressionCodecName.UNCOMPRESSED);
+ codecs = List.of(CompressionCodecName.UNCOMPRESSED);
}
System.err.println("Testing codecs: " + codecs);
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/compat/TestRowGroupFilter.java b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/compat/TestRowGroupFilter.java
index 18c8ca0ae..80bdda417 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/compat/TestRowGroupFilter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/compat/TestRowGroupFilter.java
@@ -27,7 +27,6 @@ import static org.apache.parquet.hadoop.TestInputFormat.makeBlockFromStats;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -88,36 +87,36 @@ public class TestRowGroupFilter {
set1.add(10);
set1.add(50);
List<BlockMetaData> filtered =
RowGroupFilter.filterRowGroups(FilterCompat.get(in(foo, set1)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b5), filtered);
+ assertEquals(List.of(b1, b2, b5), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(notIn(foo, set1)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b4, b5, b6), filtered);
Set<Integer> set2 = new HashSet<>();
set2.add(null);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(in(foo, set2)), blocks, schema);
- assertEquals(Arrays.asList(b1, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b3, b4, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(notIn(foo, set2)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b4, b5, b6), filtered);
set2.add(8);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(in(foo, set2)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b4, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(notIn(foo, set2)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b4, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(eq(foo, 50)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b5), filtered);
+ assertEquals(List.of(b1, b2, b5), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(notEq(foo, 50)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b4, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(eq(foo, null)), blocks, schema);
- assertEquals(Arrays.asList(b1, b3, b4, b5, b6), filtered);
+ assertEquals(List.of(b1, b3, b4, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(notEq(foo, null)), blocks, schema);
- assertEquals(Arrays.asList(b1, b2, b3, b5, b6), filtered);
+ assertEquals(List.of(b1, b2, b3, b5, b6), filtered);
filtered = RowGroupFilter.filterRowGroups(FilterCompat.get(eq(foo, 0)), blocks, schema);
- assertEquals(Arrays.asList(b6), filtered);
+ assertEquals(List.of(b6), filtered);
}
}
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilterTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilterTest.java
index f5f414c86..20ce8c9d5 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilterTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilterTest.java
@@ -53,7 +53,6 @@ import com.google.common.primitives.Ints;
import java.io.IOException;
import java.io.Serializable;
import java.math.BigInteger;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -288,7 +287,7 @@ public class DictionaryFilterTest {
@SuppressWarnings("deprecation")
private void testDictionaryEncodedColumnsV1() throws Exception {
- Set<String> dictionaryEncodedColumns = new HashSet<String>(Arrays.asList(
+ Set<String> dictionaryEncodedColumns = new HashSet<String>(List.of(
"binary_field",
"single_value_field",
"optional_single_value_field",
@@ -326,7 +325,7 @@ public class DictionaryFilterTest {
}
private void testDictionaryEncodedColumnsV2() throws Exception {
- Set<String> dictionaryEncodedColumns = new HashSet<String>(Arrays.asList(
+ Set<String> dictionaryEncodedColumns = new HashSet<String>(List.of(
"binary_field",
"single_value_field",
"optional_single_value_field",
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/recordlevel/TestRecordLevelFilters.java b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/recordlevel/TestRecordLevelFilters.java
index 1a1a31e73..0c03f548b 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/recordlevel/TestRecordLevelFilters.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/recordlevel/TestRecordLevelFilters.java
@@ -42,7 +42,6 @@ import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -87,27 +86,27 @@ public class TestRecordLevelFilters {
"business", 2000.0D,
"retirement", 1000.0D)));
- users.add(new User(20, "thing1", Arrays.asList(new PhoneNumber(5555555555L, null)), null));
+ users.add(new User(20, "thing1", List.of(new PhoneNumber(5555555555L, null)), null));
users.add(new User(
27,
"thing2",
- Arrays.asList(new PhoneNumber(1111111111L, "home"), new PhoneNumber(2222222222L, "cell")),
+ List.of(new PhoneNumber(1111111111L, "home"), new PhoneNumber(2222222222L, "cell")),
null));
users.add(new User(
28,
"popular",
- Arrays.asList(
+ List.of(
new PhoneNumber(1111111111L, "home"),
new PhoneNumber(1111111111L, "apartment"),
new PhoneNumber(2222222222L, null),
new PhoneNumber(3333333333L, "mobile")),
null));
- users.add(new User(30, null, Arrays.asList(new PhoneNumber(1111111111L, "home")), null));
+ users.add(new User(30, null, List.of(new PhoneNumber(1111111111L, "home")), null));
- users.add(new User(31, null, Arrays.asList(new PhoneNumber(2222222222L, "business")), null));
+ users.add(new User(31, null, List.of(new PhoneNumber(2222222222L, "business")), null));
for (int i = 100; i < 200; i++) {
Location location = null;
@@ -117,7 +116,7 @@ public class TestRecordLevelFilters {
if (i % 3 == 2) {
location = new Location((double) i, null);
}
- users.add(new User(i, "p" + i, Arrays.asList(new PhoneNumber(i, "cell")), location));
+ users.add(new User(i, "p" + i, List.of(new PhoneNumber(i, "cell")), location));
}
return users;
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/statisticslevel/TestStatisticsFilter.java b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/statisticslevel/TestStatisticsFilter.java
index 15d0a8ab1..6e1f267e8 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/filter2/statisticslevel/TestStatisticsFilter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/filter2/statisticslevel/TestStatisticsFilter.java
@@ -41,7 +41,6 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -72,7 +71,7 @@ public class TestStatisticsFilter {
ColumnPath.get("int", "column"),
PrimitiveTypeName.INT32,
CompressionCodecName.GZIP,
- new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
+ new HashSet<Encoding>(List.of(Encoding.PLAIN)),
stats,
0L,
0L,
@@ -87,7 +86,7 @@ public class TestStatisticsFilter {
ColumnPath.get("double", "column"),
PrimitiveTypeName.DOUBLE,
CompressionCodecName.GZIP,
- new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
+ new HashSet<Encoding>(List.of(Encoding.PLAIN)),
stats,
0L,
0L,
@@ -122,13 +121,13 @@ public class TestStatisticsFilter {
}
private static final List<ColumnChunkMetaData> columnMetas =
- Arrays.asList(getIntColumnMeta(intStats, 177L), getDoubleColumnMeta(doubleStats, 177L));
+ List.of(getIntColumnMeta(intStats, 177L), getDoubleColumnMeta(doubleStats, 177L));
- private static final List<ColumnChunkMetaData> nullColumnMetas =
Arrays.asList(
+ private static final List<ColumnChunkMetaData> nullColumnMetas = List.of(
getIntColumnMeta(nullIntStats, 177L), // column of all nulls
getDoubleColumnMeta(doubleStats, 177L));
- private static final List<ColumnChunkMetaData> missingMinMaxColumnMetas =
Arrays.asList(
+ private static final List<ColumnChunkMetaData> missingMinMaxColumnMetas =
List.of(
getIntColumnMeta(emptyIntStats, 177L), // missing min/max values and numNulls => stats is empty
getDoubleColumnMeta(missingMinMaxDoubleStats, 177L)); // missing min/max, some null values
@@ -159,11 +158,11 @@ public class TestStatisticsFilter {
assertTrue(canDrop(
eq(intColumn, null),
- Arrays.asList(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
eq(intColumn, null),
- Arrays.asList(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(eq(missingColumn, null), columnMetas));
@@ -182,17 +181,17 @@ public class TestStatisticsFilter {
allSevens.setMinMax(7, 7);
assertTrue(canDrop(
notEq(intColumn, 7),
- Arrays.asList(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
allSevens.setNumNulls(100L);
assertFalse(canDrop(
notEq(intColumn, 7),
- Arrays.asList(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
allSevens.setNumNulls(177L);
assertFalse(canDrop(
notEq(intColumn, 7),
- Arrays.asList(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(allSevens, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(notEq(missingColumn, fromString("any")), columnMetas));
@@ -216,15 +215,15 @@ public class TestStatisticsFilter {
assertFalse(canDrop(
notEq(intColumn, null),
- Arrays.asList(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
notEq(intColumn, null),
- Arrays.asList(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(
notEq(intColumn, null),
- Arrays.asList(getIntColumnMeta(statsAllNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsAllNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(notEq(missingColumn, null), columnMetas));
@@ -368,19 +367,19 @@ public class TestStatisticsFilter {
values9.add(null);
assertTrue(canDrop(
in(intColumn, values9),
- Arrays.asList(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
notIn(intColumn, values9),
- Arrays.asList(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsNoNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
in(intColumn, values9),
- Arrays.asList(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
notIn(intColumn, values9),
- Arrays.asList(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(statsSomeNulls, 177L), getDoubleColumnMeta(doubleStats, 177L))));
}
@Test
@@ -510,82 +509,73 @@ public class TestStatisticsFilter {
IntStatistics neither = new IntStatistics();
neither.setMinMax(1, 2);
- assertTrue(canDrop(pred, Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertTrue(canDrop(pred, List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
- assertFalse(
- canDrop(pred, Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertFalse(canDrop(pred, List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
- assertFalse(
- canDrop(pred, Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertFalse(canDrop(pred, List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
- assertFalse(
- canDrop(invPred, Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertFalse(canDrop(invPred, List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
- assertTrue(
- canDrop(invPred, Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertTrue(canDrop(invPred, List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
- assertFalse(canDrop(
- invPred, Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ assertFalse(canDrop(invPred, List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
// udpDropMissingColumn drops null column.
assertTrue(canDrop(
- udpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ udpDropMissingColumn, List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(
- udpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ udpDropMissingColumn, List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(
udpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
// invUdpDropMissingColumn (i.e., not(udpDropMissingColumn)) keeps null column.
assertFalse(canDrop(
invUdpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
invUdpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
invUdpDropMissingColumn,
- Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
// udpKeepMissingColumn keeps null column.
assertFalse(canDrop(
- udpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ udpKeepMissingColumn, List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
- udpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ udpKeepMissingColumn, List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(
udpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
// invUdpKeepMissingColumn (i.e., not(udpKeepMissingColumn)) drops null column.
assertTrue(canDrop(
invUdpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(seven, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(
invUdpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(eight, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertTrue(canDrop(
invUdpKeepMissingColumn,
- Arrays.asList(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
+ List.of(getIntColumnMeta(neither, 177L), getDoubleColumnMeta(doubleStats, 177L))));
assertFalse(canDrop(allPositivePred, missingMinMaxColumnMetas));
}
@Test
public void testClearExceptionForNots() {
- List<ColumnChunkMetaData> columnMetas = Arrays.asList(
- getDoubleColumnMeta(new DoubleStatistics(), 0L), getIntColumnMeta(new IntStatistics(), 0L));
+ List<ColumnChunkMetaData> columnMetas =
+ List.of(getDoubleColumnMeta(new DoubleStatistics(), 0L), getIntColumnMeta(new IntStatistics(), 0L));
FilterPredicate pred = and(not(eq(doubleColumn, 12.0)), eq(intColumn, 17));
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/format/converter/TestParquetMetadataConverter.java b/parquet-hadoop/src/test/java/org/apache/parquet/format/converter/TestParquetMetadataConverter.java
index 2529f06ad..264017a1f 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/format/converter/TestParquetMetadataConverter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/format/converter/TestParquetMetadataConverter.java
@@ -531,7 +531,7 @@ public class TestParquetMetadataConverter {
size * 2,
size,
offset));
- rowGroups.add(new RowGroup(Arrays.asList(columnChunk), size, 1));
+ rowGroups.add(new RowGroup(List.of(columnChunk), size, 1));
offset += size;
}
return new FileMetaData(1, schema, sizes.length, rowGroups);
@@ -722,12 +722,12 @@ public class TestParquetMetadataConverter {
public void testEncodingsCache() {
ParquetMetadataConverter parquetMetadataConverter = new ParquetMetadataConverter();
- List<org.apache.parquet.format.Encoding> formatEncodingsCopy1 = Arrays.asList(
+ List<org.apache.parquet.format.Encoding> formatEncodingsCopy1 = List.of(
org.apache.parquet.format.Encoding.BIT_PACKED,
org.apache.parquet.format.Encoding.RLE_DICTIONARY,
org.apache.parquet.format.Encoding.DELTA_LENGTH_BYTE_ARRAY);
- List<org.apache.parquet.format.Encoding> formatEncodingsCopy2 =
Arrays.asList(
+ List<org.apache.parquet.format.Encoding> formatEncodingsCopy2 = List.of(
org.apache.parquet.format.Encoding.BIT_PACKED,
org.apache.parquet.format.Encoding.RLE_DICTIONARY,
org.apache.parquet.format.Encoding.DELTA_LENGTH_BYTE_ARRAY);
@@ -1451,14 +1451,14 @@ public class TestParquetMetadataConverter {
ParquetMetadataConverter.toParquetColumnIndex(type, builder.build());
ColumnIndex columnIndex = ParquetMetadataConverter.fromParquetColumnIndex(type, parquetColumnIndex);
assertEquals(BoundaryOrder.ASCENDING, columnIndex.getBoundaryOrder());
- assertTrue(Arrays.asList(false, true, false).equals(columnIndex.getNullPages()));
- assertTrue(Arrays.asList(16l, 111l, 0l).equals(columnIndex.getNullCounts()));
- assertTrue(Arrays.asList(
+ assertTrue(List.of(false, true, false).equals(columnIndex.getNullPages()));
+ assertTrue(List.of(16l, 111l, 0l).equals(columnIndex.getNullCounts()));
+ assertTrue(List.of(
ByteBuffer.wrap(BytesUtils.longToBytes(-100l)),
ByteBuffer.allocate(0),
ByteBuffer.wrap(BytesUtils.longToBytes(200l)))
.equals(columnIndex.getMinValues()));
- assertTrue(Arrays.asList(
+ assertTrue(List.of(
ByteBuffer.wrap(BytesUtils.longToBytes(100l)),
ByteBuffer.allocate(0),
ByteBuffer.wrap(BytesUtils.longToBytes(500l)))
@@ -1656,8 +1656,8 @@ public class TestParquetMetadataConverter {
@Test
public void testSizeStatisticsConversion() {
PrimitiveType type = Types.required(PrimitiveTypeName.BINARY).named("test");
- List<Long> repLevelHistogram = Arrays.asList(1L, 2L, 3L, 4L, 5L);
- List<Long> defLevelHistogram = Arrays.asList(6L, 7L, 8L, 9L, 10L);
+ List<Long> repLevelHistogram = List.of(1L, 2L, 3L, 4L, 5L);
+ List<Long> defLevelHistogram = List.of(6L, 7L, 8L, 9L, 10L);
SizeStatistics sizeStatistics = ParquetMetadataConverter.fromParquetSizeStatistics(
ParquetMetadataConverter.toParquetSizeStatistics(
new SizeStatistics(type, 1024, repLevelHistogram, defLevelHistogram)),
@@ -1820,7 +1820,7 @@ public class TestParquetMetadataConverter {
);
// Create GeospatialTypes with some example type values
- Set<Integer> types = new HashSet<>(Arrays.asList(1, 2, 3));
+ Set<Integer> types = new HashSet<>(List.of(1, 2, 3));
GeospatialTypes geospatialTypes = new GeospatialTypes(types);
// Create GeospatialStatistics with the bbox and types
@@ -1891,7 +1891,7 @@ public class TestParquetMetadataConverter {
ParquetMetadataConverter converter = new ParquetMetadataConverter();
// Create GeospatialStatistics with null bbox but valid types
- Set<Integer> types = new HashSet<>(Arrays.asList(1, 2, 3));
+ Set<Integer> types = new HashSet<>(List.of(1, 2, 3));
GeospatialTypes geospatialTypes = new GeospatialTypes(types);
org.apache.parquet.column.statistics.geospatial.GeospatialStatistics origStats =
new org.apache.parquet.column.statistics.geospatial.GeospatialStatistics(null, geospatialTypes);
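
Where a mutable set is still needed, the new code wraps the immutable list, as in new HashSet<>(List.of(1, 2, 3)). On Java 9+ a plausible one-step alternative is Set.of(1, 2, 3), with the caveat that it is immutable and rejects duplicate arguments at construction time. A sketch of that trade-off (illustrative only, not part of this change):

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class SetConstruction {
        public static void main(String[] args) {
            Set<Integer> mutable = new HashSet<>(List.of(1, 2, 3)); // modifiable copy, as in the tests
            Set<Integer> fixed = Set.of(1, 2, 3); // immutable; Set.of(1, 1) throws IllegalArgumentException
            System.out.println(mutable.equals(fixed)); // true: set equality is element-wise
        }
    }

Keeping the HashSet wrapper, as the commit does, preserves mutability for call sites that later add or remove entries.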
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestBloomFiltering.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestBloomFiltering.java
index 651184a0d..4ca75c347 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestBloomFiltering.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestBloomFiltering.java
@@ -35,7 +35,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -113,7 +112,7 @@ public class TestBloomFiltering {
@Parameterized.Parameters(name = "Run {index}: isEncrypted={1}")
public static Collection<Object[]> params() {
- return Arrays.asList(
+ return List.of(
new Object[] {FILE_V1, false /*isEncrypted*/},
new Object[] {FILE_V2, false /*isEncrypted*/},
new Object[] {FILE_V1_E, true /*isEncrypted*/},
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestColumnIndexFiltering.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestColumnIndexFiltering.java
index 154dd6f5c..81883fb45 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestColumnIndexFiltering.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestColumnIndexFiltering.java
@@ -49,7 +49,6 @@ import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
@@ -137,7 +136,7 @@ public class TestColumnIndexFiltering {
@Parameters(name = "Run {index}: isEncrypted={1}")
public static Collection<Object[]> params() {
- return Arrays.asList(
+ return List.of(
new Object[] {FILE_V1, false /*isEncrypted*/},
new Object[] {FILE_V2, false /*isEncrypted*/},
new Object[] {FILE_V1_E, true /*isEncrypted*/},
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDataPageChecksums.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDataPageChecksums.java
index 013498c2b..5ee237905 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDataPageChecksums.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestDataPageChecksums.java
@@ -320,7 +320,7 @@ public class TestDataPageChecksums {
Path path = writeSimpleParquetFile(conf, CompressionCodecName.UNCOMPRESSED, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
DataPage colAPage1 = readNextPage(colADesc, pageReadStore);
@@ -361,7 +361,7 @@ public class TestDataPageChecksums {
Path path = writeSimpleParquetFile(conf, CompressionCodecName.UNCOMPRESSED, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
assertCrcNotSet(readNextPage(colADesc, pageReadStore));
@@ -392,7 +392,7 @@ public class TestDataPageChecksums {
Path path = writeSimpleParquetFile(conf, CompressionCodecName.UNCOMPRESSED, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
assertCorrectContent(getPageBytes(readNextPage(colADesc, pageReadStore)), colAPage1Bytes);
@@ -423,7 +423,7 @@ public class TestDataPageChecksums {
Path path = writeSimpleParquetFile(conf, CompressionCodecName.UNCOMPRESSED, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
DataPage colAPage1 = readNextPage(colADesc, pageReadStore);
@@ -484,7 +484,7 @@ public class TestDataPageChecksums {
// First we disable checksum verification, the corruption will go undetected as it is in the
// data section of the page
conf.setBoolean(ParquetInputFormat.PAGE_VERIFY_CHECKSUM_ENABLED, false);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
DataPage colAPage1 = readNextPage(colADesc, pageReadStore);
@@ -499,7 +499,7 @@ public class TestDataPageChecksums {
// Now we enable checksum verification, the corruption should be detected
conf.setBoolean(ParquetInputFormat.PAGE_VERIFY_CHECKSUM_ENABLED, true);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
// We expect an exception on the first encountered corrupt page (in readAllPages)
assertVerificationFailed(reader);
}
@@ -528,7 +528,7 @@ public class TestDataPageChecksums {
Path path = writeSimpleParquetFile(conf, CompressionCodecName.SNAPPY, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colADesc, colBDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colADesc, colBDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
DataPage colAPage1 = readNextPage(colADesc, pageReadStore);
@@ -574,8 +574,7 @@ public class TestDataPageChecksums {
conf.setBoolean(ParquetInputFormat.PAGE_VERIFY_CHECKSUM_ENABLED, false);
Path refPath = writeNestedWithNullsSampleParquetFile(conf, false, CompressionCodecName.SNAPPY, version);
- try (ParquetFileReader refReader =
- getParquetFileReader(refPath, conf, Arrays.asList(colCIdDesc, colDValDesc))) {
+ try (ParquetFileReader refReader = getParquetFileReader(refPath, conf, List.of(colCIdDesc, colDValDesc))) {
PageReadStore refPageReadStore = refReader.readNextRowGroup();
byte[] colCIdPageBytes = getPageBytes(readNextPage(colCIdDesc, refPageReadStore));
byte[] colDValPageBytes = getPageBytes(readNextPage(colDValDesc, refPageReadStore));
@@ -585,7 +584,7 @@ public class TestDataPageChecksums {
conf.setBoolean(ParquetInputFormat.PAGE_VERIFY_CHECKSUM_ENABLED, true);
Path path = writeNestedWithNullsSampleParquetFile(conf, false, CompressionCodecName.SNAPPY, version);
- try (ParquetFileReader reader = getParquetFileReader(path, conf, Arrays.asList(colCIdDesc, colDValDesc))) {
+ try (ParquetFileReader reader = getParquetFileReader(path, conf, List.of(colCIdDesc, colDValDesc))) {
PageReadStore pageReadStore = reader.readNextRowGroup();
DataPage colCIdPage = readNextPage(colCIdDesc, pageReadStore);
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormat.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormat.java
index 0de2d1d40..40bd9b0ee 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormat.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormat.java
@@ -344,7 +344,7 @@ public class TestInputFormat {
ColumnPath.get("foo"),
PrimitiveTypeName.INT32,
CompressionCodecName.GZIP,
- new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
+ new HashSet<Encoding>(List.of(Encoding.PLAIN)),
stats,
100l,
100l,
@@ -566,7 +566,7 @@ public class TestInputFormat {
ColumnPath.get("foo"),
PrimitiveTypeName.BINARY,
CompressionCodecName.GZIP,
- new HashSet<Encoding>(Arrays.asList(Encoding.PLAIN)),
+ new HashSet<Encoding>(List.of(Encoding.PLAIN)),
new BinaryStatistics(),
start,
0l,
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormatColumnProjection.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormatColumnProjection.java
index 7fe206b87..4fbf9d2df 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormatColumnProjection.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestInputFormatColumnProjection.java
@@ -25,7 +25,6 @@ import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
-import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.apache.hadoop.conf.Configuration;
@@ -110,7 +109,7 @@ public class TestInputFormatColumnProjection {
@Parameterized.Parameters(name = "vectored : {0}")
public static List<Boolean> params() {
- return Arrays.asList(true, false);
+ return List.of(true, false);
}
/**
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestMergeMetadataFiles.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestMergeMetadataFiles.java
index 2caf07b15..847074c14 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestMergeMetadataFiles.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestMergeMetadataFiles.java
@@ -26,7 +26,6 @@ import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -184,9 +183,9 @@ public class TestMergeMetadataFiles {
// test file serialization
Path mergedOut = new Path(new File(temp.getRoot(), "merged_meta").getAbsolutePath());
Path mergedCommonOut = new Path(new File(temp.getRoot(), "merged_common_meta").getAbsolutePath());
- ParquetFileWriter.writeMergedMetadataFile(Arrays.asList(info.metaPath1, info.metaPath2), mergedOut, info.conf);
+ ParquetFileWriter.writeMergedMetadataFile(List.of(info.metaPath1, info.metaPath2), mergedOut, info.conf);
ParquetFileWriter.writeMergedMetadataFile(
- Arrays.asList(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
+ List.of(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
ParquetMetadata mergedMeta =
ParquetFileReader.readFooter(info.conf, mergedOut, ParquetMetadataConverter.NO_FILTER);
@@ -222,8 +221,7 @@ public class TestMergeMetadataFiles {
Path mergedCommonOut = new Path(new File(temp.getRoot(), "merged_common_meta").getAbsolutePath());
try {
- ParquetFileWriter.writeMergedMetadataFile(
- Arrays.asList(info.metaPath1, info.metaPath2), mergedOut, info.conf);
+ ParquetFileWriter.writeMergedMetadataFile(List.of(info.metaPath1, info.metaPath2), mergedOut, info.conf);
fail("this should throw");
} catch (RuntimeException e) {
boolean eq1 =
@@ -236,7 +234,7 @@ public class TestMergeMetadataFiles {
try {
ParquetFileWriter.writeMergedMetadataFile(
- Arrays.asList(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
+ List.of(info.commonMetaPath1, info.commonMetaPath2), mergedCommonOut, info.conf);
fail("this should throw");
} catch (RuntimeException e) {
boolean eq1 =
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
index 4269cf516..ca03ef4db 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetFileWriter.java
@@ -46,7 +46,6 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -161,7 +160,7 @@ public class TestParquetFileWriter {
@Parameterized.Parameters(name = "vectored : {0}")
public static List<Boolean> params() {
- return Arrays.asList(true, false);
+ return List.of(true, false);
}
/**
@@ -312,8 +311,8 @@ public class TestParquetFileWriter {
configuration,
readFooter.getFileMetaData(),
path,
- Arrays.asList(rowGroup),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1)))) {
+ List.of(rowGroup),
+ List.of(SCHEMA.getColumnDescription(PATH1)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
@@ -328,7 +327,7 @@ public class TestParquetFileWriter {
readFooter.getFileMetaData(),
path,
readFooter.getBlocks(),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
+ List.of(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
@@ -478,8 +477,8 @@ public class TestParquetFileWriter {
configuration,
readFooter.getFileMetaData(),
path,
- Arrays.asList(readFooter.getBlocks().get(0)),
- Arrays.asList(schema.getColumnDescription(colPath)))) {
+ List.of(readFooter.getBlocks().get(0)),
+ List.of(schema.getColumnDescription(colPath)))) {
BloomFilterReader bloomFilterReader = r.getBloomFilterDataReader(readFooter.getBlocks().get(0));
BloomFilter bloomFilter = bloomFilterReader.readBloomFilter(
@@ -552,7 +551,7 @@ public class TestParquetFileWriter {
readFooter.getFileMetaData(),
path,
readFooter.getBlocks(),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
+ List.of(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
PageReadStore pages = reader.readNextRowGroup();
assertEquals(14, pages.getRowCount());
validateV2Page(
@@ -691,8 +690,8 @@ public class TestParquetFileWriter {
conf,
readFooter.getFileMetaData(),
path,
- Arrays.asList(readFooter.getBlocks().get(0)),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1)))) {
+ List.of(readFooter.getBlocks().get(0)),
+ List.of(SCHEMA.getColumnDescription(PATH1)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
@@ -707,7 +706,7 @@ public class TestParquetFileWriter {
readFooter.getFileMetaData(),
path,
readFooter.getBlocks(),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
+ List.of(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
@@ -819,8 +818,8 @@ public class TestParquetFileWriter {
conf,
readFooter.getFileMetaData(),
path,
- Arrays.asList(readFooter.getBlocks().get(0)),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1)))) {
+ List.of(readFooter.getBlocks().get(0)),
+ List.of(SCHEMA.getColumnDescription(PATH1)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
@@ -835,7 +834,7 @@ public class TestParquetFileWriter {
readFooter.getFileMetaData(),
path,
readFooter.getBlocks(),
- Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
+ List.of(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)))) {
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
@@ -1011,14 +1010,14 @@ public class TestParquetFileWriter {
validateFooters(metadata);
footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(
- configuration, Arrays.asList(fs.listStatus(testDirPath, HiddenFileFilter.INSTANCE)), false);
+ configuration, List.of(fs.listStatus(testDirPath, HiddenFileFilter.INSTANCE)), false);
validateFooters(footers);
fs.delete(metadataFile.getPath(), false);
fs.delete(metadataFileLight.getPath(), false);
footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(
- configuration, Arrays.asList(fs.listStatus(testDirPath)), false);
+ configuration, List.of(fs.listStatus(testDirPath)), false);
validateFooters(footers);
}
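
A subtlety in the hunk above: fs.listStatus returns a FileStatus[], so List.of(fs.listStatus(...)) goes through the varargs overload, which spreads the array into elements and, unlike Arrays.asList, copies it rather than wrapping it; later writes to the array are not visible through the list. A small sketch of the general behavior (hypothetical array contents, not code from this commit):

    import java.util.Arrays;
    import java.util.List;

    public class ArraySpread {
        public static void main(String[] args) {
            String[] names = {"a", "b"};
            List<String> wrapped = Arrays.asList(names); // live view: writes to names show through
            List<String> copied = List.of(names);        // spread as varargs and copied defensively
            names[0] = "z";
            System.out.println(wrapped); // [z, b]
            System.out.println(copied);  // [a, b]
        }
    }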
@@ -1351,8 +1350,8 @@ public class TestParquetFileWriter {
ColumnIndex columnIndex = reader.readColumnIndex(blockMeta.getColumns().get(0));
assertEquals(BoundaryOrder.ASCENDING, columnIndex.getBoundaryOrder());
- assertTrue(Arrays.asList(1l, 0l).equals(columnIndex.getNullCounts()));
- assertTrue(Arrays.asList(false, false).equals(columnIndex.getNullPages()));
+ assertTrue(List.of(1l, 0l).equals(columnIndex.getNullCounts()));
+ assertTrue(List.of(false, false).equals(columnIndex.getNullPages()));
List<ByteBuffer> minValues = columnIndex.getMinValues();
assertEquals(2, minValues.size());
List<ByteBuffer> maxValues = columnIndex.getMaxValues();
@@ -1364,8 +1363,8 @@ public class TestParquetFileWriter {
columnIndex = reader.readColumnIndex(blockMeta.getColumns().get(1));
assertEquals(BoundaryOrder.DESCENDING, columnIndex.getBoundaryOrder());
- assertTrue(Arrays.asList(0l, 3l, 0l).equals(columnIndex.getNullCounts()));
- assertTrue(Arrays.asList(false, true, false).equals(columnIndex.getNullPages()));
+ assertTrue(List.of(0l, 3l, 0l).equals(columnIndex.getNullCounts()));
+ assertTrue(List.of(false, true, false).equals(columnIndex.getNullPages()));
minValues = columnIndex.getMinValues();
assertEquals(3, minValues.size());
maxValues = columnIndex.getMaxValues();
@@ -1407,8 +1406,8 @@ public class TestParquetFileWriter {
assertNotNull(reader.readColumnIndex(blockMeta.getColumns().get(0)));
columnIndex = reader.readColumnIndex(blockMeta.getColumns().get(0));
assertEquals(BoundaryOrder.ASCENDING, columnIndex.getBoundaryOrder());
- assertTrue(Arrays.asList(0l).equals(columnIndex.getNullCounts()));
- assertTrue(Arrays.asList(false).equals(columnIndex.getNullPages()));
+ assertTrue(List.of(0l).equals(columnIndex.getNullCounts()));
+ assertTrue(List.of(false).equals(columnIndex.getNullPages()));
minValues = columnIndex.getMinValues();
assertEquals(1, minValues.size());
maxValues = columnIndex.getMaxValues();
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReader.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReader.java
index 36a8707c9..4a4157e7a 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReader.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReader.java
@@ -27,7 +27,6 @@ import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
@@ -86,7 +85,7 @@ public class TestParquetReader {
@Parameterized.Parameters
public static Collection<Object[]> data() {
Object[][] data = new Object[][] {{FILE_V1}, {FILE_V2}, {STATIC_FILE_WITHOUT_COL_INDEXES}};
- return Arrays.asList(data);
+ return List.of(data);
}
@BeforeClass
@@ -117,7 +116,7 @@ public class TestParquetReader {
}
// row index of each row in the file is same as the user id.
users.add(new PhoneBookWriter.User(
- i, "p" + i, Arrays.asList(new PhoneBookWriter.PhoneNumber(i,
"cell")), location));
+ i, "p" + i, List.of(new PhoneBookWriter.PhoneNumber(i, "cell")),
location));
}
return users;
}
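
The data() change above leans on generic inference over a two-dimensional array: with Object[][] data, List.of(data) infers the element type as Object[] and spreads the outer array, yielding the List<Object[]> that the Parameterized runner expects, just as Arrays.asList(data) did. A compact sketch (hypothetical values, not code from this commit):

    import java.util.List;

    public class TwoDimensionalParams {
        public static void main(String[] args) {
            Object[][] data = new Object[][] {{"v1"}, {"v2"}};
            List<Object[]> rows = List.of(data); // each inner Object[] becomes one list element
            System.out.println(rows.size());     // 2
        }
    }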
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReaderRandomAccess.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReaderRandomAccess.java
index 17e6181fc..fb99fa1b3 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReaderRandomAccess.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetReaderRandomAccess.java
@@ -30,7 +30,6 @@ import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Random;
@@ -152,7 +151,7 @@ public class TestParquetReaderRandomAccess {
super(path, buildSchema(), blockSize, pageSize, enableDictionary, true, version);
this.random = new Random(seed);
- this.randomGenerators = Arrays.asList(new SequentialLongGenerator(), new SequentialFlippingLongGenerator());
+ this.randomGenerators = List.of(new SequentialLongGenerator(), new SequentialFlippingLongGenerator());
this.filter = FilterCompat.get(eq(longColumn("i64_flip"), 1L));
}
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriter.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriter.java
index 03cd98ac6..a7888b58d 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriter.java
@@ -18,7 +18,6 @@
*/
package org.apache.parquet.hadoop;
-import static java.util.Arrays.asList;
import static org.apache.parquet.column.Encoding.DELTA_BYTE_ARRAY;
import static org.apache.parquet.column.Encoding.PLAIN;
import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY;
@@ -47,6 +46,7 @@ import java.io.IOException;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
@@ -166,7 +166,7 @@ public class TestParquetWriter {
expected.put("1000-" + PARQUET_1_0, PLAIN);
expected.put("10-" + PARQUET_2_0, RLE_DICTIONARY);
expected.put("1000-" + PARQUET_2_0, DELTA_BYTE_ARRAY);
- for (int modulo : asList(10, 1000)) {
+ for (int modulo : List.of(10, 1000)) {
for (WriterVersion version : WriterVersion.values()) {
Path file = new Path(root, version.name() + "_" + modulo);
ParquetWriter<Group> writer = ExampleParquetWriter.builder(new TestOutputFile(file, conf))
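
In the loop above, List.of(10, 1000) boxes the int literals to Integer exactly as the static-imported asList(10, 1000) did, and the enhanced for statement unboxes each element back to int, so iteration is behaviorally unchanged. An illustrative sketch only:

    import java.util.List;

    public class UnboxingLoop {
        public static void main(String[] args) {
            for (int modulo : List.of(10, 1000)) { // List<Integer>; each element auto-unboxed to int
                System.out.println(modulo);
            }
        }
    }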
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterNewPage.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterNewPage.java
index 4c63efa19..f05d59f27 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterNewPage.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestParquetWriterNewPage.java
@@ -18,7 +18,6 @@
*/
package org.apache.parquet.hadoop;
-import static java.util.Arrays.asList;
import static org.apache.parquet.column.Encoding.DELTA_BYTE_ARRAY;
import static org.apache.parquet.column.Encoding.PLAIN;
import static org.apache.parquet.column.Encoding.PLAIN_DICTIONARY;
@@ -33,6 +32,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -79,7 +79,7 @@ public class TestParquetWriterNewPage {
expected.put("1000-" + PARQUET_1_0, PLAIN);
expected.put("10-" + PARQUET_2_0, RLE_DICTIONARY);
expected.put("1000-" + PARQUET_2_0, DELTA_BYTE_ARRAY);
- for (int modulo : asList(10, 1000)) {
+ for (int modulo : List.of(10, 1000)) {
for (WriterVersion version : WriterVersion.values()) {
Path file = new Path(root, version.name() + "_" + modulo);
ParquetWriter<Group> writer = new ParquetWriter<Group>(
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestStoreBloomFilter.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestStoreBloomFilter.java
index 701dcc419..e80d9ff3d 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestStoreBloomFilter.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/TestStoreBloomFilter.java
@@ -24,7 +24,6 @@ import static org.apache.parquet.hadoop.TestBloomFiltering.generateDictionaryDat
import java.io.IOException;
import java.nio.file.Files;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -62,7 +61,7 @@ public class TestStoreBloomFilter {
@Parameterized.Parameters(name = "Run {index}: parquet {1}")
public static Collection<Object[]> params() {
- return Arrays.asList(new Object[] {FILE_V1, "v1"}, new Object[] {FILE_V2,
"v2"});
+ return List.of(new Object[] {FILE_V1, "v1"}, new Object[] {FILE_V2, "v2"});
}
@BeforeClass
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
index 61f68a923..f40e224e5 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/example/TestInputOutputFormat.java
@@ -31,7 +31,6 @@ import java.io.IOException;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
-import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -91,7 +90,7 @@ public class TestInputOutputFormat {
@Parameterized.Parameters(name = "vectored : {0}")
public static List<Boolean> params() {
- return Arrays.asList(true, false);
+ return List.of(true, false);
}
/**
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
index a2cb72176..a888ac2f4 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java
@@ -314,7 +314,7 @@ public class ParquetRewriterTest {
String[] encryptColumns = {"DocId"};
FileEncryptionProperties fileEncryptionProperties =
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false);
- builder.encrypt(Arrays.asList(encryptColumns)).encryptionProperties(fileEncryptionProperties);
+ builder.encrypt(List.of(encryptColumns)).encryptionProperties(fileEncryptionProperties);
builder.indexCacheStrategy(indexCacheStrategy);
@@ -345,7 +345,7 @@ public class ParquetRewriterTest {
ParquetMetadata metaData = getFileMetaData(outputFile, fileDecryptionProperties);
assertFalse(metaData.getBlocks().isEmpty());
List<ColumnChunkMetaData> columns = metaData.getBlocks().get(0).getColumns();
- Set<String> set = new HashSet<>(Arrays.asList(encryptColumns));
+ Set<String> set = new HashSet<>(List.of(encryptColumns));
for (ColumnChunkMetaData column : columns) {
if (set.contains(column.getPath().toDotString())) {
assertTrue(column.isEncrypted());
@@ -477,7 +477,7 @@ public class ParquetRewriterTest {
RewriteOptions options = builder.mask(maskColumns)
.transform(CompressionCodecName.ZSTD)
- .encrypt(Arrays.asList(encryptColumns))
+ .encrypt(List.of(encryptColumns))
.encryptionProperties(fileEncryptionProperties)
.indexCacheStrategy(indexCacheStrategy)
.build();
@@ -508,7 +508,7 @@ public class ParquetRewriterTest {
// Verify the column is encrypted
ParquetMetadata metaData = getFileMetaData(outputFile, fileDecryptionProperties);
assertFalse(metaData.getBlocks().isEmpty());
- Set<String> encryptedColumns = new HashSet<>(Arrays.asList(encryptColumns));
+ Set<String> encryptedColumns = new HashSet<>(List.of(encryptColumns));
for (BlockMetaData blockMetaData : metaData.getBlocks()) {
List<ColumnChunkMetaData> columns = blockMetaData.getColumns();
for (ColumnChunkMetaData column : columns) {
@@ -610,7 +610,7 @@ public class ParquetRewriterTest {
.renameColumns(ImmutableMap.of("Name", "NameRenamed"))
.prune(pruneColumns)
.transform(CompressionCodecName.SNAPPY)
- .encrypt(Arrays.asList(encryptColumns))
+ .encrypt(List.of(encryptColumns))
.encryptionProperties(fileEncryptionProperties)
.build();
@@ -637,7 +637,7 @@ public class ParquetRewriterTest {
ParquetMetadata metaData = getFileMetaData(outputFile, fileDecryptionProperties);
assertFalse(metaData.getBlocks().isEmpty());
- Set<String> encryptedColumns = new HashSet<>(Arrays.asList(encryptColumns));
+ Set<String> encryptedColumns = new HashSet<>(List.of(encryptColumns));
for (BlockMetaData blockMetaData : metaData.getBlocks()) {
List<ColumnChunkMetaData> columns = blockMetaData.getColumns();
for (ColumnChunkMetaData column : columns) {
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
index ac3e89054..0f43ff599 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnEncryptorTest.java
@@ -30,7 +30,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
-import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -86,7 +85,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
verifyResultDecryptionWithValidKey();
}
@@ -98,7 +97,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
verifyResultDecryptionWithValidKey();
}
@@ -110,7 +109,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
verifyResultDecryptionWithValidKey();
}
@@ -122,7 +121,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
verifyResultDecryptionWithValidKey();
}
@@ -134,13 +133,13 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
ParquetMetadata metaData = getParquetMetadata(EncDecProperties.getFileDecryptionProperties());
assertFalse(metaData.getBlocks().isEmpty());
List<ColumnChunkMetaData> columns = metaData.getBlocks().get(0).getColumns();
- Set<String> set = new HashSet<>(Arrays.asList(encryptColumns));
+ Set<String> set = new HashSet<>(List.of(encryptColumns));
for (ColumnChunkMetaData column : columns) {
if (set.contains(column.getPath().toDotString())) {
assertTrue(column.isEncrypted());
@@ -157,7 +156,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, true));
verifyResultDecryptionWithValidKey();
@@ -170,7 +169,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_V1, true));
verifyResultDecryptionWithValidKey();
@@ -183,7 +182,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_V1, false));
verifyResultDecryptionWithValidKey();
@@ -199,7 +198,7 @@ public class ColumnEncryptorTest {
columnEncryptor.encryptColumns(
inputFile.getFileName(),
outputFile,
- Arrays.asList(encryptColumns),
+ List.of(encryptColumns),
EncDecProperties.getFileEncryptionProperties(encryptColumns, ParquetCipher.AES_GCM_CTR_V1, false));
verifyResultDecryptionWithValidKey();
}
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnPrunerTest.java b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnPrunerTest.java
index c6be468ad..0582cabbb 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnPrunerTest.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/hadoop/util/ColumnPrunerTest.java
@@ -28,7 +28,6 @@ import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.nio.file.Files;
-import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -61,7 +60,7 @@ public class ColumnPrunerTest {
String outputFile = createTempFile("output");
// Remove column Gender
- List<String> cols = Arrays.asList("Gender");
+ List<String> cols = List.of("Gender");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
// Verify the schema are not changed for the columns not pruned
@@ -79,7 +78,7 @@ public class ColumnPrunerTest {
assertEquals(subFields.get(1).getName(), "Forward");
// Verify the data are not changed for the columns not pruned
- List<String> prunePaths = Arrays.asList("Gender");
+ List<String> prunePaths = List.of("Gender");
validateColumns(inputFile, prunePaths);
}
@@ -91,7 +90,7 @@ public class ColumnPrunerTest {
// Remove columns
String cargs[] = {inputFile, outputFile, "Name", "Gender"};
- List<String> cols = Arrays.asList("Name", "Gender");
+ List<String> cols = List.of("Name", "Gender");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
// Verify the schema are not changed for the columns not pruned
@@ -108,7 +107,7 @@ public class ColumnPrunerTest {
assertEquals(subFields.get(1).getName(), "Forward");
// Verify the data are not changed for the columns not pruned
- List<String> prunePaths = Arrays.asList("Name", "Gender");
+ List<String> prunePaths = List.of("Name", "Gender");
validateColumns(inputFile, prunePaths);
}
@@ -117,7 +116,7 @@ public class ColumnPrunerTest {
// Create Parquet file
String inputFile = createParquetFile("input");
String outputFile = createTempFile("output");
- List<String> cols = Arrays.asList("no_exist");
+ List<String> cols = List.of("no_exist");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
}
@@ -128,7 +127,7 @@ public class ColumnPrunerTest {
String outputFile = createTempFile("output");
// Remove nested column
- List<String> cols = Arrays.asList("Links.Backward");
+ List<String> cols = List.of("Links.Backward");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
// Verify the schema are not changed for the columns not pruned
@@ -146,7 +145,7 @@ public class ColumnPrunerTest {
assertEquals(subFields.get(0).getName(), "Forward");
// Verify the data are not changed for the columns not pruned
- List<String> prunePaths = Arrays.asList("Links.Backward");
+ List<String> prunePaths = List.of("Links.Backward");
validateColumns(inputFile, prunePaths);
}
@@ -157,7 +156,7 @@ public class ColumnPrunerTest {
String outputFile = createTempFile("output");
// Remove parent column. All of its children will be removed.
- List<String> cols = Arrays.asList("Links");
+ List<String> cols = List.of("Links");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
// Verify the schema are not changed for the columns not pruned
@@ -171,7 +170,7 @@ public class ColumnPrunerTest {
assertEquals(fields.get(2).getName(), "Gender");
// Verify the data are not changed for the columns not pruned
- List<String> prunePaths = Arrays.asList("Links");
+ List<String> prunePaths = List.of("Links");
validateColumns(inputFile, prunePaths);
}
@@ -180,7 +179,7 @@ public class ColumnPrunerTest {
// Create Parquet file
String inputFile = createParquetFile("input");
String outputFile = createTempFile("output");
- List<String> cols = Arrays.asList("Links.Not_exists");
+ List<String> cols = List.of("Links.Not_exists");
columnPruner.pruneColumns(conf, new Path(inputFile), new Path(outputFile), cols);
}
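
For the single-element cases above, such as List.of("Gender"), current JDKs return a compact dedicated implementation (one small object, no backing array), so the change is at least as cheap as Arrays.asList("Gender") or Collections.singletonList("Gender") while keeping the same immutability as the other List.of call sites. A sketch of the equivalence (illustrative only, not code from this commit):

    import java.util.Collections;
    import java.util.List;

    public class SingleElementLists {
        public static void main(String[] args) {
            List<String> a = List.of("Gender");                   // immutable, size 1
            List<String> b = Collections.singletonList("Gender"); // older equivalent, also immutable
            System.out.println(a.equals(b));                      // true: List equality is element-wise
        }
    }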
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestColumnIndexes.java b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestColumnIndexes.java
index 6573c409e..8af975e3e 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestColumnIndexes.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestColumnIndexes.java
@@ -323,7 +323,7 @@ public class TestColumnIndexes {
@Parameters
public static Collection<WriteContext> getContexts() {
- return Arrays.asList(
+ return List.of(
new WriteContext(System.nanoTime(), 1000, 8),
new WriteContext(System.nanoTime(), 20000, 64),
new WriteContext(System.nanoTime(), 50000, 10));
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16ReadWriteRoundTrip.java b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16ReadWriteRoundTrip.java
index 9bbc6d1c6..8251ab212 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16ReadWriteRoundTrip.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16ReadWriteRoundTrip.java
@@ -25,7 +25,6 @@ import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -140,14 +139,14 @@ public class TestFloat16ReadWriteRoundTrip {
@Test
public void testFloat16ColumnIndex() throws IOException {
- List<Binary[]> testValues = Arrays.asList(
+ List<Binary[]> testValues = List.of(
valuesInAscendingOrder,
valuesInDescendingOrder,
valuesUndefinedOrder,
valuesAllPositiveZero,
valuesAllNegativeZero,
valuesWithNaN);
- List<Binary[]> expectedValues = Arrays.asList(
+ List<Binary[]> expectedValues = List.of(
valuesInAscendingOrderMinMax,
valuesInDescendingOrderMinMax,
valuesUndefinedOrderMinMax,
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16Statistics.java b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16Statistics.java
index 5e82740a4..8617ea260 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16Statistics.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestFloat16Statistics.java
@@ -24,7 +24,6 @@ import static org.junit.Assert.assertArrayEquals;
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -141,14 +140,14 @@ public class TestFloat16Statistics {
@Test
public void testFloat16StatisticsMultipleCases() throws IOException {
- List<Binary[]> testValues = Arrays.asList(
+ List<Binary[]> testValues = List.of(
valuesInAscendingOrder,
valuesInDescendingOrder,
valuesUndefinedOrder,
valuesAllPositiveZero,
valuesAllNegativeZero,
valuesWithNaN);
- List<Binary[]> expectedValues = Arrays.asList(
+ List<Binary[]> expectedValues = List.of(
valuesInAscendingOrderMinMax,
valuesInDescendingOrderMinMax,
valuesUndefinedOrderMinMax,
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestSizeStatisticsRoundTrip.java b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestSizeStatisticsRoundTrip.java
index 026e13a3b..0906d7cf2 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestSizeStatisticsRoundTrip.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestSizeStatisticsRoundTrip.java
@@ -20,8 +20,8 @@ package org.apache.parquet.statistics;
import java.io.File;
import java.io.IOException;
-import java.util.Arrays;
import java.util.Collections;
+import java.util.List;
import java.util.Optional;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -141,12 +141,12 @@ public class TestSizeStatisticsRoundTrip {
SizeStatistics sizeStatistics = column.getSizeStatistics();
Assert.assertEquals(Optional.of(3L), sizeStatistics.getUnencodedByteArrayDataBytes());
- Assert.assertEquals(Arrays.asList(2L, 1L), sizeStatistics.getRepetitionLevelHistogram());
- Assert.assertEquals(Arrays.asList(0L, 0L, 0L, 3L), sizeStatistics.getDefinitionLevelHistogram());
+ Assert.assertEquals(List.of(2L, 1L), sizeStatistics.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(0L, 0L, 0L, 3L), sizeStatistics.getDefinitionLevelHistogram());
ColumnIndex columnIndex = reader.readColumnIndex(column);
- Assert.assertEquals(Arrays.asList(2L, 1L), sizeStatistics.getRepetitionLevelHistogram());
- Assert.assertEquals(Arrays.asList(0L, 0L, 0L, 3L), sizeStatistics.getDefinitionLevelHistogram());
+ Assert.assertEquals(List.of(2L, 1L), sizeStatistics.getRepetitionLevelHistogram());
+ Assert.assertEquals(List.of(0L, 0L, 0L, 3L), sizeStatistics.getDefinitionLevelHistogram());
OffsetIndex offsetIndex = reader.readOffsetIndex(column);
Assert.assertEquals(1, offsetIndex.getPageCount());
diff --git a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
index abca17ede..16e93d6e0 100644
--- a/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
+++ b/parquet-hadoop/src/test/java/org/apache/parquet/statistics/TestStatistics.java
@@ -35,7 +35,6 @@ import com.google.common.collect.ImmutableSet;
import java.io.File;
import java.io.IOException;
import java.math.BigInteger;
-import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Random;
@@ -346,7 +345,7 @@ public class TestStatistics {
int fixedLength = schema.getType("fixed-binary").asPrimitiveType().getTypeLength();
- randomGenerators = Arrays.<RandomValueGenerator<?>>asList(
+ randomGenerators = List.<RandomValueGenerator<?>>of(
new RandomValues.IntGenerator(random.nextLong()),
new RandomValues.LongGenerator(random.nextLong()),
new RandomValues.Int96Generator(random.nextLong()),
@@ -545,7 +544,7 @@ public class TestStatistics {
int blockSize = (random.nextInt(54) + 10) * MEGABYTE;
int pageSize = (random.nextInt(10) + 1) * MEGABYTE;
- List<DataContext> contexts = Arrays.asList(
+ List<DataContext> contexts = List.of(
new DataContext(
random.nextLong(),
file,
@@ -592,7 +591,7 @@ public class TestStatistics {
int blockSize = (random.nextInt(54) + 10) * MEGABYTE;
int pageSize = (random.nextInt(10) + 1) * MEGABYTE;
- List<DataContext> contexts = Arrays.asList(
+ List<DataContext> contexts = List.of(
new DataContext(
random.nextLong(),
file,