This is an automated email from the ASF dual-hosted git repository.
fokko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/parquet-mr.git
The following commit(s) were added to refs/heads/master by this push:
new d3e3118 PARQUET-1710: Use Objects.requireNonNull (#703)
d3e3118 is described below
commit d3e3118150d3fa5d2831443ddc944d7416b9a9f6
Author: belugabehr <[email protected]>
AuthorDate: Sun Jan 26 15:06:27 2020 -0500
PARQUET-1710: Use Objects.requireNonNull (#703)
---
.../parquet/cascading/ParquetTupleScheme.java | 7 ++---
.../parquet/cascading/ParquetTupleScheme.java | 9 +++---
.../parquet/cascading/ParquetValueScheme.java | 20 +++++++++----
.../java/org/apache/parquet/cli/csv/AvroCSV.java | 8 +++---
.../apache/parquet/column/ParquetProperties.java | 9 +++---
.../parquet/column/impl/ColumnReaderBase.java | 8 +++---
.../apache/parquet/column/page/DictionaryPage.java | 7 ++---
.../org/apache/parquet/filter/AndRecordFilter.java | 6 ++--
.../apache/parquet/filter/ColumnPredicates.java | 7 +++--
.../apache/parquet/filter/ColumnRecordFilter.java | 6 ++--
.../org/apache/parquet/filter/NotRecordFilter.java | 5 +++-
.../org/apache/parquet/filter/OrRecordFilter.java | 7 +++--
.../parquet/filter2/compat/FilterCompat.java | 9 +++---
.../filter2/predicate/LogicalInverseRewriter.java | 5 ++--
.../parquet/filter2/predicate/LogicalInverter.java | 4 +--
.../parquet/filter2/predicate/Operators.java | 33 ++++++++++------------
.../predicate/SchemaCompatibilityValidator.java | 7 ++---
.../parquet/filter2/predicate/Statistics.java | 13 ++++-----
.../recordlevel/FilteringGroupConverter.java | 12 ++++----
.../recordlevel/FilteringPrimitiveConverter.java | 6 ++--
.../recordlevel/FilteringRecordMaterializer.java | 11 ++++----
.../IncrementallyUpdatedFilterPredicate.java | 6 ++--
...crementallyUpdatedFilterPredicateEvaluator.java | 5 ++--
...ncrementallyUpdatedFilterPredicateResetter.java | 5 ++--
.../org/apache/parquet/io/MessageColumnIO.java | 9 +++---
.../org/apache/parquet/schema/ColumnOrder.java | 4 +--
.../main/java/org/apache/parquet/schema/Type.java | 11 ++++----
.../main/java/org/apache/parquet/schema/Types.java | 12 ++++----
.../parquet/column/page/mem/MemPageReader.java | 6 ++--
.../java/org/apache/parquet/Preconditions.java | 4 +++
.../java/org/apache/parquet/glob/WildcardPath.java | 7 ++---
.../apache/parquet/hadoop/metadata/ColumnPath.java | 5 ++--
.../parquet/filter2/compat/RowGroupFilter.java | 15 +++++-----
.../filter2/dictionarylevel/DictionaryFilter.java | 8 ++----
.../filter2/statisticslevel/StatisticsFilter.java | 7 ++---
.../apache/parquet/hadoop/DirectCodecFactory.java | 7 +++--
.../hadoop/InternalParquetRecordReader.java | 4 +--
.../hadoop/InternalParquetRecordWriter.java | 4 +--
.../org/apache/parquet/hadoop/MemoryManager.java | 6 ++--
.../apache/parquet/hadoop/ParquetOutputFormat.java | 6 ++--
.../org/apache/parquet/hadoop/ParquetReader.java | 15 +++++-----
.../apache/parquet/hadoop/ParquetRecordWriter.java | 7 ++---
.../apache/parquet/hadoop/api/WriteSupport.java | 15 ++++++----
.../parquet/hadoop/example/GroupWriteSupport.java | 6 ++--
.../parquet/hadoop/metadata/FileMetaData.java | 9 ++++--
.../parquet/hadoop/metadata/GlobalMetaData.java | 9 ++++--
.../apache/parquet/hadoop/util/HadoopStreams.java | 6 ++--
.../org/apache/parquet/thrift/ConvertedField.java | 10 +++----
.../apache/parquet/thrift/ThriftParquetReader.java | 11 ++++----
.../parquet/thrift/ThriftSchemaConvertVisitor.java | 5 ++--
.../DeprecatedFieldProjectionFilter.java | 4 +--
51 files changed, 221 insertions(+), 206 deletions(-)
diff --git
a/parquet-cascading/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
b/parquet-cascading/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
index a62199f..22e67ea 100644
---
a/parquet-cascading/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
+++
b/parquet-cascading/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.util.List;
+import java.util.Objects;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
@@ -45,8 +46,6 @@ import
org.apache.parquet.hadoop.mapred.DeprecatedParquetInputFormat;
import org.apache.parquet.hadoop.mapred.DeprecatedParquetOutputFormat;
import org.apache.parquet.schema.MessageType;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* A Cascading Scheme that converts Parquet groups into Cascading tuples.
* If you provide it with sourceFields, it will selectively materialize only
the columns for those fields.
@@ -75,12 +74,12 @@ public class ParquetTupleScheme extends Scheme<JobConf,
RecordReader, OutputColl
}
public ParquetTupleScheme(FilterPredicate filterPredicate) {
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
"filterPredicate cannot be null");
}
public ParquetTupleScheme(FilterPredicate filterPredicate, Fields
sourceFields) {
super(sourceFields);
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
"filterPredicate cannot be null");
}
/**
diff --git
a/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
b/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
index 23220fb..bb04d48 100644
---
a/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
+++
b/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetTupleScheme.java
@@ -20,6 +20,7 @@
import java.io.IOException;
import java.util.List;
+import java.util.Objects;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
@@ -45,8 +46,6 @@ import
org.apache.parquet.hadoop.mapred.DeprecatedParquetInputFormat;
import org.apache.parquet.hadoop.mapred.DeprecatedParquetOutputFormat;
import org.apache.parquet.schema.MessageType;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* A Cascading Scheme that converts Parquet groups into Cascading tuples.
* If you provide it with sourceFields, it will selectively materialize only
the columns for those fields.
@@ -74,12 +73,14 @@ public class ParquetTupleScheme extends Scheme<JobConf,
RecordReader, OutputColl
}
public ParquetTupleScheme(FilterPredicate filterPredicate) {
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
+ "filterPredicate cannot be null");
}
public ParquetTupleScheme(FilterPredicate filterPredicate, Fields
sourceFields) {
super(sourceFields);
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
+ "filterPredicate cannot be null");
}
/**
diff --git
a/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetValueScheme.java
b/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetValueScheme.java
index 28f7f32..5296c58 100644
---
a/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetValueScheme.java
+++
b/parquet-cascading3/src/main/java/org/apache/parquet/cascading/ParquetValueScheme.java
@@ -20,6 +20,7 @@ package org.apache.parquet.cascading;
import java.io.IOException;
import java.io.Serializable;
+import java.util.Objects;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
@@ -38,8 +39,6 @@ import org.apache.parquet.hadoop.mapred.Container;
import org.apache.parquet.hadoop.thrift.ParquetThriftInputFormat;
import org.apache.parquet.hadoop.thrift.ThriftReadSupport;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* A Cascading Scheme that returns a simple Tuple with a single value, the
"value" object
* coming out of the underlying InputFormat.
@@ -87,20 +86,29 @@ public abstract class ParquetValueScheme<T> extends
Scheme<JobConf, RecordReader
}
public Config<T> withFilterPredicate(FilterPredicate f) {
- return new Config<T>(this.klass, checkNotNull(f, "filterPredicate"),
this.deprecatedProjectionString, this.strictProjectionString);
+ return new Config<T>(this.klass,
+ Objects.requireNonNull(f, "filterPredicate cannot be null"),
+ this.deprecatedProjectionString, this.strictProjectionString);
}
@Deprecated
public Config<T> withProjectionString(String p) {
- return new Config<T>(this.klass, this.filterPredicate, checkNotNull(p,
"projectionString"), this.strictProjectionString);
+ return new Config<T>(this.klass, this.filterPredicate,
+ Objects.requireNonNull(p, "projectionString cannot be null"),
+ this.strictProjectionString);
}
public Config<T> withStrictProjectionString(String p) {
- return new Config<T>(this.klass, this.filterPredicate,
this.deprecatedProjectionString, checkNotNull(p, "projectionString"));
+ return new Config<T>(this.klass, this.filterPredicate,
+ this.deprecatedProjectionString,
+ Objects.requireNonNull(p, "projectionString cannot be null"));
}
public Config<T> withRecordClass(Class<T> klass) {
- return new Config<T>(checkNotNull(klass, "recordClass"),
this.filterPredicate, this.deprecatedProjectionString,
this.strictProjectionString);
+ return new Config<T>(
+ Objects.requireNonNull(klass, "recordClass cannot be null"),
+ this.filterPredicate, this.deprecatedProjectionString,
+ this.strictProjectionString);
}
}
diff --git a/parquet-cli/src/main/java/org/apache/parquet/cli/csv/AvroCSV.java
b/parquet-cli/src/main/java/org/apache/parquet/cli/csv/AvroCSV.java
index 47cd665..6113d4c 100644
--- a/parquet-cli/src/main/java/org/apache/parquet/cli/csv/AvroCSV.java
+++ b/parquet-cli/src/main/java/org/apache/parquet/cli/csv/AvroCSV.java
@@ -22,7 +22,6 @@ package org.apache.parquet.cli.csv;
import au.com.bytecode.opencsv.CSVParser;
import au.com.bytecode.opencsv.CSVReader;
import com.google.common.base.CharMatcher;
-import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import org.apache.avro.Schema;
@@ -31,6 +30,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
+import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
@@ -104,17 +104,17 @@ public class AvroCSV {
// read the header and then the first line
header = reader.readNext();
line = reader.readNext();
- Preconditions.checkNotNull(line, "No content to infer schema");
+ Objects.requireNonNull(line, "No content to infer schema");
} else if (props.header != null) {
header = newParser(props).parseLine(props.header);
line = reader.readNext();
- Preconditions.checkNotNull(line, "No content to infer schema");
+ Objects.requireNonNull(line, "No content to infer schema");
} else {
// use the first line to create a header
line = reader.readNext();
- Preconditions.checkNotNull(line, "No content to infer schema");
+ Objects.requireNonNull(line, "No content to infer schema");
header = new String[line.length];
for (int i = 0; i < line.length; i += 1) {
header[i] = "field_" + String.valueOf(i);
diff --git
a/parquet-column/src/main/java/org/apache/parquet/column/ParquetProperties.java
b/parquet-column/src/main/java/org/apache/parquet/column/ParquetProperties.java
index c022b72..45a4aac 100644
---
a/parquet-column/src/main/java/org/apache/parquet/column/ParquetProperties.java
+++
b/parquet-column/src/main/java/org/apache/parquet/column/ParquetProperties.java
@@ -24,6 +24,9 @@ import org.apache.parquet.bytes.CapacityByteArrayOutputStream;
import org.apache.parquet.bytes.HeapByteBufferAllocator;
import static org.apache.parquet.bytes.BytesUtils.getWidthFromMaxInt;
+
+import java.util.Objects;
+
import org.apache.parquet.column.impl.ColumnWriteStoreV1;
import org.apache.parquet.column.impl.ColumnWriteStoreV2;
import org.apache.parquet.column.page.PageWriteStore;
@@ -326,14 +329,12 @@ public class ParquetProperties {
}
public Builder withAllocator(ByteBufferAllocator allocator) {
- Preconditions.checkNotNull(allocator, "ByteBufferAllocator");
- this.allocator = allocator;
+ this.allocator = Objects.requireNonNull(allocator, "ByteBufferAllocator
cannot be null");
return this;
}
public Builder withValuesWriterFactory(ValuesWriterFactory factory) {
- Preconditions.checkNotNull(factory, "ValuesWriterFactory");
- this.valuesWriterFactory = factory;
+ this.valuesWriterFactory = Objects.requireNonNull(factory,
"ValuesWriterFactory cannot be null");
return this;
}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/column/impl/ColumnReaderBase.java
b/parquet-column/src/main/java/org/apache/parquet/column/impl/ColumnReaderBase.java
index 4f8b50e..966861e 100644
---
a/parquet-column/src/main/java/org/apache/parquet/column/impl/ColumnReaderBase.java
+++
b/parquet-column/src/main/java/org/apache/parquet/column/impl/ColumnReaderBase.java
@@ -19,12 +19,12 @@
package org.apache.parquet.column.impl;
import static java.lang.String.format;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static org.apache.parquet.column.ValuesType.DEFINITION_LEVEL;
import static org.apache.parquet.column.ValuesType.REPETITION_LEVEL;
import static org.apache.parquet.column.ValuesType.VALUES;
import java.io.IOException;
+import java.util.Objects;
import org.apache.parquet.CorruptDeltaByteArrays;
import org.apache.parquet.VersionParser.ParsedVersion;
@@ -402,9 +402,9 @@ abstract class ColumnReaderBase implements ColumnReader {
* @param writerVersion writer version string from the Parquet file being
read
*/
ColumnReaderBase(ColumnDescriptor path, PageReader pageReader,
PrimitiveConverter converter, ParsedVersion writerVersion) {
- this.path = checkNotNull(path, "path");
- this.pageReader = checkNotNull(pageReader, "pageReader");
- this.converter = checkNotNull(converter, "converter");
+ this.path = Objects.requireNonNull(path, "path cannot be null");
+ this.pageReader = Objects.requireNonNull(pageReader, "pageReader cannot be
null");
+ this.converter = Objects.requireNonNull(converter, "converter cannot be
null");
this.writerVersion = writerVersion;
this.maxDefinitionLevel = path.getMaxDefinitionLevel();
DictionaryPage dictionaryPage = pageReader.readDictionaryPage();
diff --git
a/parquet-column/src/main/java/org/apache/parquet/column/page/DictionaryPage.java
b/parquet-column/src/main/java/org/apache/parquet/column/page/DictionaryPage.java
index 21e1114..35c57ce 100644
---
a/parquet-column/src/main/java/org/apache/parquet/column/page/DictionaryPage.java
+++
b/parquet-column/src/main/java/org/apache/parquet/column/page/DictionaryPage.java
@@ -18,9 +18,8 @@
*/
package org.apache.parquet.column.page;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
import java.io.IOException;
+import java.util.Objects;
import org.apache.parquet.bytes.BytesInput;
import org.apache.parquet.column.Encoding;
@@ -53,9 +52,9 @@ public class DictionaryPage extends Page {
*/
public DictionaryPage(BytesInput bytes, int uncompressedSize, int
dictionarySize, Encoding encoding) {
super(Math.toIntExact(bytes.size()), uncompressedSize);
- this.bytes = checkNotNull(bytes, "bytes");
+ this.bytes = Objects.requireNonNull(bytes, "bytes cannot be null");
this.dictionarySize = dictionarySize;
- this.encoding = checkNotNull(encoding, "encoding");
+ this.encoding = Objects.requireNonNull(encoding, "encoding cannot be
null");
}
public BytesInput getBytes() {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java
b/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java
index 9ae417b..9f20da4 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java
@@ -18,6 +18,8 @@
*/
package org.apache.parquet.filter;
+import java.util.Objects;
+
import org.apache.parquet.Preconditions;
import org.apache.parquet.column.ColumnReader;
@@ -38,8 +40,8 @@ public final class AndRecordFilter implements RecordFilter {
* @return an unbound and filter
*/
public static final UnboundRecordFilter and( final UnboundRecordFilter
filter1, final UnboundRecordFilter filter2 ) {
- Preconditions.checkNotNull( filter1, "filter1" );
- Preconditions.checkNotNull( filter2, "filter2" );
+ Objects.requireNonNull(filter1, "filter1 cannot be null");
+ Objects.requireNonNull(filter2, "filter2 cannot be null");
return new UnboundRecordFilter() {
@Override
public RecordFilter bind(Iterable<ColumnReader> readers) {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnPredicates.java
b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnPredicates.java
index 6bcdace..4d9d712 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnPredicates.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnPredicates.java
@@ -18,7 +18,8 @@
*/
package org.apache.parquet.filter;
-import org.apache.parquet.Preconditions;
+import java.util.Objects;
+
import org.apache.parquet.column.ColumnReader;
import org.apache.parquet.io.api.Binary;
@@ -60,7 +61,7 @@ public class ColumnPredicates {
}
public static Predicate equalTo(final String target) {
- Preconditions.checkNotNull(target,"target");
+ Objects.requireNonNull(target, "target cannot be null");
return new Predicate() {
@Override
public boolean apply(ColumnReader input) {
@@ -169,7 +170,7 @@ public class ColumnPredicates {
}
public static <E extends Enum> Predicate equalTo(final E target) {
- Preconditions.checkNotNull(target,"target");
+ Objects.requireNonNull(target, "target cannot be null");
final String targetAsString = target.name();
return new Predicate() {
@Override
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java
b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java
index 4e048ba..b085b98 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java
@@ -20,7 +20,7 @@ package org.apache.parquet.filter;
import org.apache.parquet.column.ColumnReader;
import java.util.Arrays;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* Record filter which applies the supplied predicate to the specified column.
@@ -41,8 +41,8 @@ public final class ColumnRecordFilter implements RecordFilter
{
*/
public static final UnboundRecordFilter column(final String columnPath,
final
ColumnPredicates.Predicate predicate) {
- checkNotNull(columnPath, "columnPath");
- checkNotNull(predicate, "predicate");
+ Objects.requireNonNull(columnPath, "columnPath cannot be null");
+ Objects.requireNonNull(predicate, "predicate cannot be null");
return new UnboundRecordFilter() {
final String[] filterPath = columnPath.split("\\.");
@Override
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java
b/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java
index 24a23f1..b76d017 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java
@@ -18,6 +18,8 @@
*/
package org.apache.parquet.filter;
+import java.util.Objects;
+
import org.apache.parquet.Preconditions;
import org.apache.parquet.column.ColumnReader;
@@ -34,7 +36,8 @@ public final class NotRecordFilter implements RecordFilter {
* @return a not record filter
*/
public static final UnboundRecordFilter not( final UnboundRecordFilter
filter) {
- Preconditions.checkNotNull( filter, "filter" );
+ Objects.requireNonNull(filter, "filter cannot be null");
+
return new UnboundRecordFilter() {
@Override
public RecordFilter bind(Iterable<ColumnReader> readers) {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java
b/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java
index 0c6f71c..2c99b23 100644
--- a/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java
+++ b/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java
@@ -18,7 +18,8 @@
*/
package org.apache.parquet.filter;
-import org.apache.parquet.Preconditions;
+import java.util.Objects;
+
import org.apache.parquet.column.ColumnReader;
/**
@@ -36,8 +37,8 @@ public final class OrRecordFilter implements RecordFilter {
* @return an or record filter
*/
public static final UnboundRecordFilter or( final UnboundRecordFilter
filter1, final UnboundRecordFilter filter2 ) {
- Preconditions.checkNotNull( filter1, "filter1" );
- Preconditions.checkNotNull( filter2, "filter2" );
+ Objects.requireNonNull(filter1, "filter1 cannot be null");
+ Objects.requireNonNull(filter2, "filter2 cannot be null");
return new UnboundRecordFilter() {
@Override
public RecordFilter bind(Iterable<ColumnReader> readers) {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/compat/FilterCompat.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/compat/FilterCompat.java
index f8e62bc..463a54c 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/compat/FilterCompat.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/compat/FilterCompat.java
@@ -25,7 +25,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.parquet.Preconditions.checkArgument;
-import static org.apache.parquet.Preconditions.checkNotNull;
+
+import java.util.Objects;
/**
* Parquet currently has two ways to specify a filter for dropping records at
read time.
@@ -69,7 +70,7 @@ public class FilterCompat {
* @return a filter for the given predicate
*/
public static Filter get(FilterPredicate filterPredicate) {
- checkNotNull(filterPredicate, "filterPredicate");
+ Objects.requireNonNull(filterPredicate, "filterPredicate cannot be null");
LOG.info("Filtering using predicate: {}", filterPredicate);
@@ -125,7 +126,7 @@ public class FilterCompat {
private final FilterPredicate filterPredicate;
private FilterPredicateCompat(FilterPredicate filterPredicate) {
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
"filterPredicate cannot be null");
}
public FilterPredicate getFilterPredicate() {
@@ -143,7 +144,7 @@ public class FilterCompat {
private final UnboundRecordFilter unboundRecordFilter;
private UnboundRecordFilterCompat(UnboundRecordFilter unboundRecordFilter)
{
- this.unboundRecordFilter = checkNotNull(unboundRecordFilter,
"unboundRecordFilter");
+ this.unboundRecordFilter = Objects.requireNonNull(unboundRecordFilter,
"unboundRecordFilter cannot be null");
}
public UnboundRecordFilter getUnboundRecordFilter() {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverseRewriter.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverseRewriter.java
index bacf1c7..88cb836 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverseRewriter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverseRewriter.java
@@ -31,10 +31,11 @@ import org.apache.parquet.filter2.predicate.Operators.NotEq;
import org.apache.parquet.filter2.predicate.Operators.Or;
import org.apache.parquet.filter2.predicate.Operators.UserDefined;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static org.apache.parquet.filter2.predicate.FilterApi.and;
import static org.apache.parquet.filter2.predicate.FilterApi.or;
+import java.util.Objects;
+
/**
* Recursively removes all use of the not() operator in a predicate
* by replacing all instances of not(x) with the inverse(x),
@@ -50,7 +51,7 @@ public final class LogicalInverseRewriter implements
Visitor<FilterPredicate> {
private static final LogicalInverseRewriter INSTANCE = new
LogicalInverseRewriter();
public static FilterPredicate rewrite(FilterPredicate pred) {
- checkNotNull(pred, "pred");
+ Objects.requireNonNull(pred, "pred cannot be null");
return pred.accept(INSTANCE);
}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverter.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverter.java
index eb57c7e..cc0186b 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/LogicalInverter.java
@@ -31,7 +31,7 @@ import org.apache.parquet.filter2.predicate.Operators.NotEq;
import org.apache.parquet.filter2.predicate.Operators.Or;
import org.apache.parquet.filter2.predicate.Operators.UserDefined;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* Converts a {@link FilterPredicate} to its logical inverse.
@@ -45,7 +45,7 @@ public final class LogicalInverter implements
Visitor<FilterPredicate> {
private static final LogicalInverter INSTANCE = new LogicalInverter();
public static FilterPredicate invert(FilterPredicate pred) {
- checkNotNull(pred, "pred");
+ Objects.requireNonNull(pred, "pred cannot be null");
return pred.accept(INSTANCE);
}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Operators.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Operators.java
index f39a5d3..9a1696c 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Operators.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Operators.java
@@ -20,12 +20,11 @@ package org.apache.parquet.filter2.predicate;
import java.io.Serializable;
import java.util.Locale;
+import java.util.Objects;
import org.apache.parquet.hadoop.metadata.ColumnPath;
import org.apache.parquet.io.api.Binary;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* These are the operators in a filter predicate expression tree.
* They are constructed by using the methods in {@link FilterApi}
@@ -38,10 +37,8 @@ public final class Operators {
private final Class<T> columnType;
protected Column(ColumnPath columnPath, Class<T> columnType) {
- checkNotNull(columnPath, "columnPath");
- checkNotNull(columnType, "columnType");
- this.columnPath = columnPath;
- this.columnType = columnType;
+ this.columnPath = Objects.requireNonNull(columnPath, "columnPath cannot
be null");
+ this.columnType = Objects.requireNonNull(columnType, "columnType cannot
be null");
}
public Class<T> getColumnType() {
@@ -124,7 +121,7 @@ public final class Operators {
private final String toString;
protected ColumnFilterPredicate(Column<T> column, T value) {
- this.column = checkNotNull(column, "column");
+ this.column = Objects.requireNonNull(column, "column cannot be null");
// Eq and NotEq allow value to be null, Lt, Gt, LtEq, GtEq however do
not, so they guard against
// null in their own constructors.
@@ -201,7 +198,7 @@ public final class Operators {
// value cannot be null
Lt(Column<T> column, T value) {
- super(column, checkNotNull(value, "value"));
+ super(column, Objects.requireNonNull(value, "value cannot be null"));
}
@Override
@@ -214,7 +211,7 @@ public final class Operators {
// value cannot be null
LtEq(Column<T> column, T value) {
- super(column, checkNotNull(value, "value"));
+ super(column, Objects.requireNonNull(value, "value cannot be null"));
}
@Override
@@ -228,7 +225,7 @@ public final class Operators {
// value cannot be null
Gt(Column<T> column, T value) {
- super(column, checkNotNull(value, "value"));
+ super(column, Objects.requireNonNull(value, "value cannot be null"));
}
@Override
@@ -241,7 +238,7 @@ public final class Operators {
// value cannot be null
GtEq(Column<T> column, T value) {
- super(column, checkNotNull(value, "value"));
+ super(column, Objects.requireNonNull(value, "value cannot be null"));
}
@Override
@@ -257,8 +254,8 @@ public final class Operators {
private final String toString;
protected BinaryLogicalFilterPredicate(FilterPredicate left,
FilterPredicate right) {
- this.left = checkNotNull(left, "left");
- this.right = checkNotNull(right, "right");
+ this.left = Objects.requireNonNull(left, "left cannot be null");
+ this.right = Objects.requireNonNull(right, "right cannot be null");
String name = getClass().getSimpleName().toLowerCase(Locale.ENGLISH);
this.toString = name + "(" + left + ", " + right + ")";
}
@@ -327,7 +324,7 @@ public final class Operators {
private final String toString;
Not(FilterPredicate predicate) {
- this.predicate = checkNotNull(predicate, "predicate");
+ this.predicate = Objects.requireNonNull(predicate, "predicate cannot be
null");
this.toString = "not(" + predicate + ")";
}
@@ -363,7 +360,7 @@ public final class Operators {
protected final Column<T> column;
UserDefined(Column<T> column) {
- this.column = checkNotNull(column, "column");
+ this.column = Objects.requireNonNull(column, "column cannot be null");
}
public Column<T> getColumn() {
@@ -386,7 +383,7 @@ public final class Operators {
UserDefinedByClass(Column<T> column, Class<U> udpClass) {
super(column);
- this.udpClass = checkNotNull(udpClass, "udpClass");
+ this.udpClass = Objects.requireNonNull(udpClass, "udpClass cannot be
null");
String name = getClass().getSimpleName().toLowerCase(Locale.ENGLISH);
this.toString = name + "(" + column.getColumnPath().toDotString() + ", "
+ udpClass.getName() + ")";
@@ -440,7 +437,7 @@ public final class Operators {
UserDefinedByInstance(Column<T> column, U udpInstance) {
super(column);
- this.udpInstance = checkNotNull(udpInstance, "udpInstance");
+ this.udpInstance = Objects.requireNonNull(udpInstance, "udpInstance
cannot be null");
String name = getClass().getSimpleName().toLowerCase(Locale.ENGLISH);
this.toString = name + "(" + column.getColumnPath().toDotString() + ", "
+ udpInstance + ")";
}
@@ -484,7 +481,7 @@ public final class Operators {
private final String toString;
LogicalNotUserDefined(UserDefined<T, U> userDefined) {
- this.udp = checkNotNull(userDefined, "userDefined");
+ this.udp = Objects.requireNonNull(userDefined, "userDefined cannot be
null");
this.toString = "inverted(" + udp + ")";
}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/SchemaCompatibilityValidator.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/SchemaCompatibilityValidator.java
index fe8cf98..c75036b 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/SchemaCompatibilityValidator.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/SchemaCompatibilityValidator.java
@@ -20,6 +20,7 @@ package org.apache.parquet.filter2.predicate;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.filter2.predicate.Operators.And;
@@ -38,8 +39,6 @@ import
org.apache.parquet.filter2.predicate.Operators.UserDefined;
import org.apache.parquet.hadoop.metadata.ColumnPath;
import org.apache.parquet.schema.MessageType;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* Inspects the column types found in the provided {@link FilterPredicate} and
compares them
* to the actual schema found in the parquet file. If the provided predicate's
types are
@@ -57,8 +56,8 @@ import static org.apache.parquet.Preconditions.checkNotNull;
public class SchemaCompatibilityValidator implements
FilterPredicate.Visitor<Void> {
public static void validate(FilterPredicate predicate, MessageType schema) {
- checkNotNull(predicate, "predicate");
- checkNotNull(schema, "schema");
+ Objects.requireNonNull(predicate, "predicate cannot be null");
+ Objects.requireNonNull(schema, "schema cannot be null");
predicate.accept(new SchemaCompatibilityValidator(schema));
}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Statistics.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Statistics.java
index 3f67a15..ab02857 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Statistics.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/predicate/Statistics.java
@@ -19,8 +19,7 @@
package org.apache.parquet.filter2.predicate;
import java.util.Comparator;
-
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* Contains statistics about a group of records
@@ -40,8 +39,8 @@ public class Statistics<T> {
*/
@Deprecated
public Statistics(T min, T max) {
- this.min = checkNotNull(min, "min");
- this.max = checkNotNull(max, "max");
+ this.min = Objects.requireNonNull(min, "min cannot be null");
+ this.max = Objects.requireNonNull(max, "max cannot be null");
this.comparator = null;
}
@@ -53,9 +52,9 @@ public class Statistics<T> {
* @param comparator a comparator to use when comparing values described by
this statistics instance
*/
public Statistics(T min, T max, Comparator<T> comparator) {
- this.min = checkNotNull(min, "min");
- this.max = checkNotNull(max, "max");
- this.comparator = checkNotNull(comparator, "comparator");
+ this.min = Objects.requireNonNull(min, "min cannot be null");
+ this.max = Objects.requireNonNull(max, "max cannot be null");
+ this.comparator = Objects.requireNonNull(comparator, "comparator cannot be
null");
}
/**
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringGroupConverter.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringGroupConverter.java
index 0c3556c..00ddd8b 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringGroupConverter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringGroupConverter.java
@@ -21,6 +21,7 @@ package org.apache.parquet.filter2.recordlevel;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.hadoop.metadata.ColumnPath;
import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
@@ -29,7 +30,6 @@ import org.apache.parquet.io.api.Converter;
import org.apache.parquet.io.api.GroupConverter;
import static org.apache.parquet.Preconditions.checkArgument;
-import static org.apache.parquet.Preconditions.checkNotNull;
/**
* See {@link FilteringRecordMaterializer}
@@ -56,10 +56,10 @@ public class FilteringGroupConverter extends GroupConverter
{
Map<ColumnPath, List<ValueInspector>> valueInspectorsByColumn,
Map<List<Integer>,
PrimitiveColumnIO> columnIOsByIndexFieldPath) {
- this.delegate = checkNotNull(delegate, "delegate");
- this.indexFieldPath = checkNotNull(indexFieldPath, "indexFieldPath");
- this.columnIOsByIndexFieldPath = checkNotNull(columnIOsByIndexFieldPath,
"columnIOsByIndexFieldPath");
- this.valueInspectorsByColumn = checkNotNull(valueInspectorsByColumn,
"valueInspectorsByColumn");
+ this.delegate = Objects.requireNonNull(delegate, "delegate cannot be
null");
+ this.indexFieldPath = Objects.requireNonNull(indexFieldPath,
"indexFieldPath cannot be null");
+ this.columnIOsByIndexFieldPath =
Objects.requireNonNull(columnIOsByIndexFieldPath, "columnIOsByIndexFieldPath
cannot be null");
+ this.valueInspectorsByColumn =
Objects.requireNonNull(valueInspectorsByColumn, "valueInspectorsByColumn cannot
be null");
}
// When a converter is asked for, we get the real one from the delegate,
then wrap it
@@ -69,7 +69,7 @@ public class FilteringGroupConverter extends GroupConverter {
public Converter getConverter(int fieldIndex) {
// get the real converter from the delegate
- Converter delegateConverter =
checkNotNull(delegate.getConverter(fieldIndex), "delegate converter");
+ Converter delegateConverter =
Objects.requireNonNull(delegate.getConverter(fieldIndex), "delegate converter
cannot be null");
// determine the indexFieldPath for the converter proxy we're about to
make, which is
// this converter's path + the requested fieldIndex
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringPrimitiveConverter.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringPrimitiveConverter.java
index 18edb64..97c8c8c 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringPrimitiveConverter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringPrimitiveConverter.java
@@ -23,7 +23,7 @@ import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicat
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.io.api.PrimitiveConverter;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* see {@link FilteringRecordMaterializer}
@@ -36,8 +36,8 @@ public class FilteringPrimitiveConverter extends
PrimitiveConverter {
private final ValueInspector[] valueInspectors;
public FilteringPrimitiveConverter(PrimitiveConverter delegate,
ValueInspector[] valueInspectors) {
- this.delegate = checkNotNull(delegate, "delegate");
- this.valueInspectors = checkNotNull(valueInspectors, "valueInspectors");
+ this.delegate = Objects.requireNonNull(delegate, "delegate cannot be
null");
+ this.valueInspectors = Objects.requireNonNull(valueInspectors,
"valueInspectors cannot be null");
}
// TODO: this works, but
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java
index d9d8e1d..9489dc4 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/FilteringRecordMaterializer.java
@@ -24,6 +24,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.stream.Collectors;
import org.apache.parquet.hadoop.metadata.ColumnPath;
@@ -32,8 +33,6 @@ import org.apache.parquet.io.PrimitiveColumnIO;
import org.apache.parquet.io.api.GroupConverter;
import org.apache.parquet.io.api.RecordMaterializer;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* A pass-through proxy for a {@link RecordMaterializer} that updates a {@link
IncrementallyUpdatedFilterPredicate}
* as it receives concrete values for the current record. If, after the record
assembly signals that
@@ -57,10 +56,10 @@ public class FilteringRecordMaterializer<T> extends
RecordMaterializer<T> {
Map<ColumnPath, List<ValueInspector>> valueInspectorsByColumn,
IncrementallyUpdatedFilterPredicate filterPredicate) {
- checkNotNull(columnIOs, "columnIOs");
- checkNotNull(valueInspectorsByColumn, "valueInspectorsByColumn");
- this.filterPredicate = checkNotNull(filterPredicate, "filterPredicate");
- this.delegate = checkNotNull(delegate, "delegate");
+ Objects.requireNonNull(columnIOs, "columnIOs cannot be null");
+ Objects.requireNonNull(valueInspectorsByColumn, "valueInspectorsByColumn
cannot be null");
+ this.filterPredicate = Objects.requireNonNull(filterPredicate,
"filterPredicate cannot be null");
+ this.delegate = Objects.requireNonNull(delegate, "delegate cannot be
null");
// keep track of which path of indices leads to which primitive column
Map<List<Integer>, PrimitiveColumnIO> columnIOsByIndexFieldPath = new
HashMap<>();
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
index 84fd0f4..a280e77 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicate.java
@@ -20,7 +20,7 @@ package org.apache.parquet.filter2.recordlevel;
import org.apache.parquet.io.api.Binary;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* A rewritten version of a {@link
org.apache.parquet.filter2.predicate.FilterPredicate} which receives
@@ -129,8 +129,8 @@ public interface IncrementallyUpdatedFilterPredicate {
private final IncrementallyUpdatedFilterPredicate right;
BinaryLogical(IncrementallyUpdatedFilterPredicate left,
IncrementallyUpdatedFilterPredicate right) {
- this.left = checkNotNull(left, "left");
- this.right = checkNotNull(right, "right");
+ this.left = Objects.requireNonNull(left, "left cannot be null");
+ this.right = Objects.requireNonNull(right, "right cannot be null");
}
public final IncrementallyUpdatedFilterPredicate getLeft() {
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateEvaluator.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateEvaluator.java
index d1aa66c..24b82ed 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateEvaluator.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateEvaluator.java
@@ -23,7 +23,7 @@ import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicat
import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.Visitor;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* Determines whether an {@link IncrementallyUpdatedFilterPredicate} is
satisfied or not.
@@ -37,8 +37,7 @@ public class IncrementallyUpdatedFilterPredicateEvaluator
implements Visitor {
private static final IncrementallyUpdatedFilterPredicateEvaluator INSTANCE =
new IncrementallyUpdatedFilterPredicateEvaluator();
public static boolean evaluate(IncrementallyUpdatedFilterPredicate pred) {
- checkNotNull(pred, "pred");
- return pred.accept(INSTANCE);
+ return Objects.requireNonNull(pred, "pred cannot be
null").accept(INSTANCE);
}
private IncrementallyUpdatedFilterPredicateEvaluator() {}
diff --git
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateResetter.java
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateResetter.java
index a75731a..2dacfb0 100644
---
a/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateResetter.java
+++
b/parquet-column/src/main/java/org/apache/parquet/filter2/recordlevel/IncrementallyUpdatedFilterPredicateResetter.java
@@ -23,7 +23,7 @@ import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicat
import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.ValueInspector;
import
org.apache.parquet.filter2.recordlevel.IncrementallyUpdatedFilterPredicate.Visitor;
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
/**
* Resets all the {@link ValueInspector}s in a {@link
IncrementallyUpdatedFilterPredicate}.
@@ -32,8 +32,7 @@ public final class
IncrementallyUpdatedFilterPredicateResetter implements Visito
private static final IncrementallyUpdatedFilterPredicateResetter INSTANCE =
new IncrementallyUpdatedFilterPredicateResetter();
public static void reset(IncrementallyUpdatedFilterPredicate pred) {
- checkNotNull(pred, "pred");
- pred.accept(INSTANCE);
+ Objects.requireNonNull(pred, "pred cannot be null").accept(INSTANCE);
}
private IncrementallyUpdatedFilterPredicateResetter() { }
diff --git
a/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java
b/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java
index 15d79c3..bad39ff 100644
--- a/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java
+++ b/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java
@@ -24,6 +24,7 @@ import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.column.ColumnWriteStore;
import org.apache.parquet.column.ColumnWriter;
@@ -50,8 +51,6 @@ import it.unimi.dsi.fastutil.ints.IntIterator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* Message level of the IO structure
*/
@@ -99,9 +98,9 @@ public class MessageColumnIO extends GroupColumnIO {
public <T> RecordReader<T> getRecordReader(final PageReadStore columns,
final RecordMaterializer<T>
recordMaterializer,
final Filter filter) {
- checkNotNull(columns, "columns");
- checkNotNull(recordMaterializer, "recordMaterializer");
- checkNotNull(filter, "filter");
+ Objects.requireNonNull(columns, "columns cannot be null");
+ Objects.requireNonNull(recordMaterializer, "recordMaterializer cannot be
null");
+ Objects.requireNonNull(filter, "filter cannot be null");
if (leaves.isEmpty()) {
return new EmptyRecordReader<>(recordMaterializer);
diff --git
a/parquet-column/src/main/java/org/apache/parquet/schema/ColumnOrder.java
b/parquet-column/src/main/java/org/apache/parquet/schema/ColumnOrder.java
index 144a93a..94a1275 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/ColumnOrder.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/ColumnOrder.java
@@ -18,7 +18,7 @@
*/
package org.apache.parquet.schema;
-import org.apache.parquet.Preconditions;
+import java.util.Objects;
/**
* Class representing the column order with all the related parameters.
@@ -61,7 +61,7 @@ public class ColumnOrder {
private final ColumnOrderName columnOrderName;
private ColumnOrder(ColumnOrderName columnOrderName) {
- this.columnOrderName = Preconditions.checkNotNull(columnOrderName,
"columnOrderName");
+ this.columnOrderName = Objects.requireNonNull(columnOrderName,
"columnOrderName cannot be null");
}
public ColumnOrderName getColumnOrderName() {
diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/Type.java
b/parquet-column/src/main/java/org/apache/parquet/schema/Type.java
index 0b6d905..310227a 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/Type.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/Type.java
@@ -18,9 +18,8 @@
*/
package org.apache.parquet.schema;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
import java.util.List;
+import java.util.Objects;
import org.apache.parquet.io.InvalidRecordException;
@@ -171,8 +170,8 @@ abstract public class Type {
Type(String name, Repetition repetition, OriginalType originalType,
DecimalMetadata decimalMetadata, ID id) {
super();
- this.name = checkNotNull(name, "name");
- this.repetition = checkNotNull(repetition, "repetition");
+ this.name = Objects.requireNonNull(name, "name cannot be null");
+ this.repetition = Objects.requireNonNull(repetition, "repetition cannot be
null");
this.logicalTypeAnnotation = originalType == null ? null :
LogicalTypeAnnotation.fromOriginalType(originalType, decimalMetadata);
this.id = id;
}
@@ -183,8 +182,8 @@ abstract public class Type {
Type(String name, Repetition repetition, LogicalTypeAnnotation
logicalTypeAnnotation, ID id) {
super();
- this.name = checkNotNull(name, "name");
- this.repetition = checkNotNull(repetition, "repetition");
+ this.name = Objects.requireNonNull(name, "name cannot be null");
+ this.repetition = Objects.requireNonNull(repetition, "repetition cannot be
null");
this.logicalTypeAnnotation = logicalTypeAnnotation;
this.id = id;
}
diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/Types.java
b/parquet-column/src/main/java/org/apache/parquet/schema/Types.java
index 28773ae..c62010a 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/Types.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/Types.java
@@ -21,6 +21,7 @@ package org.apache.parquet.schema;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.Optional;
import org.apache.parquet.Preconditions;
@@ -235,8 +236,7 @@ public class Types {
protected final THIS repetition(Type.Repetition repetition) {
Preconditions.checkArgument(!repetitionAlreadySet,
"Repetition has already been set");
- Preconditions.checkNotNull(repetition, "Repetition cannot be null");
- this.repetition = repetition;
+ this.repetition = Objects.requireNonNull(repetition, "Repetition cannot
be null");
this.repetitionAlreadySet = true;
return self();
}
@@ -308,8 +308,8 @@ public class Types {
* @return the parent {@code GroupBuilder} or the constructed {@code Type}
*/
public P named(String name) {
- Preconditions.checkNotNull(name, "Name is required");
- Preconditions.checkNotNull(repetition, "Repetition is required");
+ Objects.requireNonNull(name, "Name is required");
+ Objects.requireNonNull(repetition, "Repetition is required");
Type type = build(name);
if (parent != null) {
@@ -1322,7 +1322,7 @@ public class Types {
protected Type build(String name) {
Preconditions.checkState(logicalTypeAnnotation == null,
"LIST is already the logical type and can't be changed");
- Preconditions.checkNotNull(elementType, "List element type");
+ Objects.requireNonNull(elementType, "List element type cannot be null");
GroupBuilder<GroupType> builder =
buildGroup(repetition).as(OriginalType.LIST);
if (id != null) {
@@ -1421,7 +1421,7 @@ public class Types {
*/
@Override
public MessageType named(String name) {
- Preconditions.checkNotNull(name, "Name is required");
+ Objects.requireNonNull(name, "Name is required");
return new MessageType(name, fields);
}
}
diff --git
a/parquet-column/src/test/java/org/apache/parquet/column/page/mem/MemPageReader.java
b/parquet-column/src/test/java/org/apache/parquet/column/page/mem/MemPageReader.java
index 5373c9a..b664ad8 100644
---
a/parquet-column/src/test/java/org/apache/parquet/column/page/mem/MemPageReader.java
+++
b/parquet-column/src/test/java/org/apache/parquet/column/page/mem/MemPageReader.java
@@ -18,9 +18,8 @@
*/
package org.apache.parquet.column.page.mem;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
import java.util.Iterator;
+import java.util.Objects;
import org.apache.parquet.column.page.DictionaryPage;
import org.apache.parquet.column.page.DataPage;
@@ -39,9 +38,8 @@ public class MemPageReader implements PageReader {
public MemPageReader(long totalValueCount, Iterator<DataPage> pages,
DictionaryPage dictionaryPage) {
super();
- checkNotNull(pages, "pages");
+ this.pages = Objects.requireNonNull(pages, "pages cannot be null");
this.totalValueCount = totalValueCount;
- this.pages = pages;
this.dictionaryPage = dictionaryPage;
}
diff --git a/parquet-common/src/main/java/org/apache/parquet/Preconditions.java
b/parquet-common/src/main/java/org/apache/parquet/Preconditions.java
index 2827b46..0aa6a59 100644
--- a/parquet-common/src/main/java/org/apache/parquet/Preconditions.java
+++ b/parquet-common/src/main/java/org/apache/parquet/Preconditions.java
@@ -18,6 +18,8 @@
*/
package org.apache.parquet;
+import java.util.Objects;
+
/**
* Utility for parameter validation
*/
@@ -30,7 +32,9 @@ public final class Preconditions {
* @param <T> the type of the object
* @return the validated o
* @throws NullPointerException if o is null
+ * @deprecated Use JDK {@link Objects#requireNonNull(Object, String)}
*/
+ @Deprecated
public static <T> T checkNotNull(T o, String name) throws
NullPointerException {
if (o == null) {
throw new NullPointerException(name + " should not be null");
diff --git
a/parquet-common/src/main/java/org/apache/parquet/glob/WildcardPath.java
b/parquet-common/src/main/java/org/apache/parquet/glob/WildcardPath.java
index e1e4bd3..4abe349 100644
--- a/parquet-common/src/main/java/org/apache/parquet/glob/WildcardPath.java
+++ b/parquet-common/src/main/java/org/apache/parquet/glob/WildcardPath.java
@@ -18,10 +18,9 @@
*/
package org.apache.parquet.glob;
+import java.util.Objects;
import java.util.regex.Pattern;
-import org.apache.parquet.Preconditions;
-
/**
* Holds a String with wildcards ('*'), and can answer whether a given string
matches this WildcardPath.
* For example:
@@ -47,8 +46,8 @@ public class WildcardPath {
private final Pattern pattern;
public WildcardPath(String parentGlobPath, String wildcardPath, char delim) {
- this.parentGlobPath = Preconditions.checkNotNull(parentGlobPath,
"parentGlobPath");
- this.originalPattern = Preconditions.checkNotNull(wildcardPath,
"wildcardPath");
+ this.parentGlobPath = Objects.requireNonNull(parentGlobPath,
"parentGlobPath cannot be null");
+ this.originalPattern = Objects.requireNonNull(wildcardPath, "wildcardPath
cannot be null");
this.pattern = Pattern.compile(buildRegex(wildcardPath, delim));
}
diff --git
a/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/ColumnPath.java
b/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/ColumnPath.java
index 0b3f365..65a23eb 100644
---
a/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/ColumnPath.java
+++
b/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/ColumnPath.java
@@ -21,8 +21,7 @@ package org.apache.parquet.hadoop.metadata;
import java.io.Serializable;
import java.util.Arrays;
import java.util.Iterator;
-
-import static org.apache.parquet.Preconditions.checkNotNull;
+import java.util.Objects;
public final class ColumnPath implements Iterable<String>, Serializable {
@@ -38,7 +37,7 @@ public final class ColumnPath implements Iterable<String>,
Serializable {
};
public static ColumnPath fromDotString(String path) {
- checkNotNull(path, "path");
+ Objects.requireNonNull(path, "path cannot be null");
return get(path.split("\\."));
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java
index d1d40e9..e3cd9cb 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/compat/RowGroupFilter.java
@@ -21,6 +21,7 @@ package org.apache.parquet.filter2.compat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import org.apache.parquet.filter2.compat.FilterCompat.Filter;
import org.apache.parquet.filter2.compat.FilterCompat.NoOpFilter;
@@ -33,8 +34,6 @@ import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.schema.MessageType;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* Given a {@link Filter} applies it to a list of BlockMetaData (row groups)
* If the Filter is an {@link org.apache.parquet.filter.UnboundRecordFilter}
or the no op filter,
@@ -60,26 +59,26 @@ public class RowGroupFilter implements
Visitor<List<BlockMetaData>> {
*/
@Deprecated
public static List<BlockMetaData> filterRowGroups(Filter filter,
List<BlockMetaData> blocks, MessageType schema) {
- checkNotNull(filter, "filter");
+ Objects.requireNonNull(filter, "filter cannot be null");
return filter.accept(new RowGroupFilter(blocks, schema));
}
public static List<BlockMetaData> filterRowGroups(List<FilterLevel> levels,
Filter filter, List<BlockMetaData> blocks, ParquetFileReader reader) {
- checkNotNull(filter, "filter");
+ Objects.requireNonNull(filter, "filter cannot be null");
return filter.accept(new RowGroupFilter(levels, blocks, reader));
}
@Deprecated
private RowGroupFilter(List<BlockMetaData> blocks, MessageType schema) {
- this.blocks = checkNotNull(blocks, "blocks");
- this.schema = checkNotNull(schema, "schema");
+ this.blocks = Objects.requireNonNull(blocks, "blocks cannot be null");
+ this.schema = Objects.requireNonNull(schema, "schema cannot be null");
this.levels = Collections.singletonList(FilterLevel.STATISTICS);
this.reader = null;
}
private RowGroupFilter(List<FilterLevel> levels, List<BlockMetaData> blocks,
ParquetFileReader reader) {
- this.blocks = checkNotNull(blocks, "blocks");
- this.reader = checkNotNull(reader, "reader");
+ this.blocks = Objects.requireNonNull(blocks, "blocks cannot be null");
+ this.reader = Objects.requireNonNull(reader, "reader cannot be null");
this.schema = reader.getFileMetaData().getSchema();
this.levels = levels;
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilter.java
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilter.java
index c43380b..2f69fa6 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilter.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/dictionarylevel/DictionaryFilter.java
@@ -39,12 +39,10 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.function.IntFunction;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
-
/**
* Applies filters based on the contents of column dictionaries.
*/
@@ -55,8 +53,8 @@ public class DictionaryFilter implements
FilterPredicate.Visitor<Boolean> {
private static final boolean BLOCK_CANNOT_MATCH = true;
public static boolean canDrop(FilterPredicate pred,
List<ColumnChunkMetaData> columns, DictionaryPageReadStore dictionaries) {
- checkNotNull(pred, "pred");
- checkNotNull(columns, "columns");
+ Objects.requireNonNull(pred, "pred cannot be null");
+ Objects.requireNonNull(columns, "columns cannot be null");
return pred.accept(new DictionaryFilter(columns, dictionaries));
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/statisticslevel/StatisticsFilter.java
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/statisticslevel/StatisticsFilter.java
index 31b6c45..4db2eb9 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/filter2/statisticslevel/StatisticsFilter.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/filter2/statisticslevel/StatisticsFilter.java
@@ -21,6 +21,7 @@ package org.apache.parquet.filter2.statisticslevel;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.column.statistics.Statistics;
import org.apache.parquet.hadoop.metadata.ColumnPath;
@@ -40,8 +41,6 @@ import
org.apache.parquet.filter2.predicate.Operators.UserDefined;
import org.apache.parquet.filter2.predicate.UserDefinedPredicate;
import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* Applies a {@link org.apache.parquet.filter2.predicate.FilterPredicate} to
statistics about a group of
* records.
@@ -67,8 +66,8 @@ public class StatisticsFilter implements
FilterPredicate.Visitor<Boolean> {
private static final boolean BLOCK_CANNOT_MATCH = true;
public static boolean canDrop(FilterPredicate pred,
List<ColumnChunkMetaData> columns) {
- checkNotNull(pred, "pred");
- checkNotNull(columns, "columns");
+ Objects.requireNonNull(pred, "pred cannot be null");
+ Objects.requireNonNull(columns, "columns cannot be null");
return pred.accept(new StatisticsFilter(columns));
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/DirectCodecFactory.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/DirectCodecFactory.java
index 007c374..d5f13e2 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/DirectCodecFactory.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/DirectCodecFactory.java
@@ -26,6 +26,7 @@ import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.commons.pool.BasePoolableObjectFactory;
import org.apache.commons.pool.impl.GenericObjectPool;
@@ -77,14 +78,16 @@ class DirectCodecFactory extends CodecFactory implements
AutoCloseable {
* See docs on CodecFactory#createDirectCodecFactory which is how this class
is
* exposed publicly and is just a pass-through factory method for this
constructor
* to hide the rest of this class from public access.
+ *
+ * @throws NullPointerException if allocator is {@code null}
*/
DirectCodecFactory(Configuration config, ByteBufferAllocator allocator, int
pageSize) {
super(config, pageSize);
- Preconditions.checkNotNull(allocator, "allocator");
+
+ this.allocator = Objects.requireNonNull(allocator, "allocator cannot be
null");
Preconditions.checkState(allocator.isDirect(),
"A %s requires a direct buffer allocator be provided.",
getClass().getSimpleName());
- this.allocator = allocator;
}
private ByteBuffer ensure(ByteBuffer buffer, int size) {
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java
index 80debd7..6e3b088 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordReader.java
@@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
@@ -47,7 +48,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static
org.apache.parquet.hadoop.ParquetInputFormat.RECORD_FILTERING_ENABLED;
import static
org.apache.parquet.hadoop.ParquetInputFormat.STRICT_TYPE_CHECKING;
@@ -87,7 +87,7 @@ class InternalParquetRecordReader<T> {
*/
public InternalParquetRecordReader(ReadSupport<T> readSupport, Filter
filter) {
this.readSupport = readSupport;
- this.filter = checkNotNull(filter, "filter");
+ this.filter = Objects.requireNonNull(filter, "filter cannot be null");
}
/**
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
index c3da323..bd56025 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/InternalParquetRecordWriter.java
@@ -20,11 +20,11 @@ package org.apache.parquet.hadoop;
import static java.lang.Math.max;
import static java.lang.Math.min;
-import static org.apache.parquet.Preconditions.checkNotNull;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.column.ColumnWriteStore;
import org.apache.parquet.column.ParquetProperties;
@@ -84,7 +84,7 @@ class InternalParquetRecordWriter<T> {
boolean validating,
ParquetProperties props) {
this.parquetFileWriter = parquetFileWriter;
- this.writeSupport = checkNotNull(writeSupport, "writeSupport");
+ this.writeSupport = Objects.requireNonNull(writeSupport, "writeSupport
cannot be null");
this.schema = schema;
this.extraMetaData = extraMetaData;
this.rowGroupSize = rowGroupSize;
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
index d6fabb2..0c86acc 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
@@ -19,7 +19,6 @@
package org.apache.parquet.hadoop;
import org.apache.parquet.ParquetRuntimeException;
-import org.apache.parquet.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -27,6 +26,7 @@ import java.lang.management.ManagementFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
/**
* Implements a memory manager that keeps a global context of how many Parquet
@@ -168,8 +168,8 @@ public class MemoryManager {
* @param callBack the callback passed in from upper layer, such as Hive.
*/
public void registerScaleCallBack(String callBackName, Runnable callBack) {
- Preconditions.checkNotNull(callBackName, "callBackName");
- Preconditions.checkNotNull(callBack, "callBack");
+ Objects.requireNonNull(callBackName, "callBackName cannot be null");
+ Objects.requireNonNull(callBack, "callBack cannot be null");
if (callBacks.containsKey(callBackName)) {
throw new IllegalArgumentException("The callBackName " + callBackName +
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
index 676e2ca..3f4b93c 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetOutputFormat.java
@@ -18,11 +18,11 @@
*/
package org.apache.parquet.hadoop;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static org.apache.parquet.hadoop.ParquetWriter.DEFAULT_BLOCK_SIZE;
import static org.apache.parquet.hadoop.util.ContextUtil.getConfiguration;
import java.io.IOException;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -502,7 +502,9 @@ public class ParquetOutputFormat<T> extends
FileOutputFormat<Void, T> {
if (writeSupport != null) return writeSupport;
Class<?> writeSupportClass = getWriteSupportClass(configuration);
try {
- return (WriteSupport<T>)checkNotNull(writeSupportClass, "writeSupportClass").newInstance();
+ return (WriteSupport<T>) Objects
+ .requireNonNull(writeSupportClass, "writeSupportClass cannot be null")
+ .newInstance();
} catch (InstantiationException | IllegalAccessException e) {
throw new BadConfigurationException("could not instantiate write support
class: " + writeSupportClass, e);
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetReader.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetReader.java
index 28e1967..dbee210 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetReader.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetReader.java
@@ -18,14 +18,13 @@
*/
package org.apache.parquet.hadoop;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -110,7 +109,7 @@ public class ParquetReader<T> implements Closeable {
FilterCompat.Filter filter) throws IOException {
this(Collections.singletonList((InputFile) HadoopInputFile.fromPath(file,
conf)),
HadoopReadOptions.builder(conf)
- .withRecordFilter(checkNotNull(filter, "filter"))
+ .withRecordFilter(Objects.requireNonNull(filter, "filter cannot be null"))
.build(),
readSupport);
}
@@ -182,9 +181,9 @@ public class ParquetReader<T> implements Closeable {
@Deprecated
private Builder(ReadSupport<T> readSupport, Path path) {
- this.readSupport = checkNotNull(readSupport, "readSupport");
+ this.readSupport = Objects.requireNonNull(readSupport, "readSupport cannot be null");
this.file = null;
- this.path = checkNotNull(path, "path");
+ this.path = Objects.requireNonNull(path, "path cannot be null");
this.conf = new Configuration();
this.optionsBuilder = HadoopReadOptions.builder(conf);
}
@@ -193,14 +192,14 @@ public class ParquetReader<T> implements Closeable {
protected Builder(Path path) {
this.readSupport = null;
this.file = null;
- this.path = checkNotNull(path, "path");
+ this.path = Objects.requireNonNull(path, "path cannot be null");
this.conf = new Configuration();
this.optionsBuilder = HadoopReadOptions.builder(conf);
}
protected Builder(InputFile file) {
this.readSupport = null;
- this.file = checkNotNull(file, "file");
+ this.file = Objects.requireNonNull(file, "file cannot be null");
this.path = null;
if (file instanceof HadoopInputFile) {
this.conf = ((HadoopInputFile) file).getConfiguration();
@@ -212,7 +211,7 @@ public class ParquetReader<T> implements Closeable {
// when called, resets options to the defaults from conf
public Builder<T> withConf(Configuration conf) {
- this.conf = checkNotNull(conf, "conf");
+ this.conf = Objects.requireNonNull(conf, "conf cannot be null");
// previous versions didn't use the builder, so may set filter before
conf. this maintains
// compatibility for filter. other options are reset by a new conf.
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordWriter.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordWriter.java
index 2542402..a6dabc4 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordWriter.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetRecordWriter.java
@@ -20,6 +20,7 @@ package org.apache.parquet.hadoop;
import java.io.IOException;
import java.util.Map;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.RecordWriter;
@@ -32,8 +33,6 @@ import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* Writes records to a Parquet file
*
@@ -121,7 +120,7 @@ public class ParquetRecordWriter<T> extends
RecordWriter<Void, T> {
.build();
internalWriter = new InternalParquetRecordWriter<T>(w, writeSupport,
schema,
extraMetaData, blockSize, compressor, validating, props);
- this.memoryManager = checkNotNull(memoryManager, "memoryManager");
+ this.memoryManager = Objects.requireNonNull(memoryManager, "memoryManager cannot be null");
memoryManager.addWriter(internalWriter, blockSize);
this.codecFactory = null;
}
@@ -152,7 +151,7 @@ public class ParquetRecordWriter<T> extends
RecordWriter<Void, T> {
internalWriter = new InternalParquetRecordWriter<T>(w, writeSupport,
schema,
extraMetaData, blockSize, codecFactory.getCompressor(codec),
validating,
props);
- this.memoryManager = checkNotNull(memoryManager, "memoryManager");
+ this.memoryManager = Objects.requireNonNull(memoryManager, "memoryManager cannot be null");
memoryManager.addWriter(internalWriter, blockSize);
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
index c08882f..9549d5f 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/api/WriteSupport.java
@@ -18,11 +18,10 @@
*/
package org.apache.parquet.hadoop.api;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
@@ -47,11 +46,14 @@ abstract public class WriteSupport<T> {
/**
* @param schema the schema of the data
* @param extraMetaData application specific metadata to add in the file
+ *
+ * @throws NullPointerException if schema or extraMetaData is {@code null}
*/
public WriteContext(MessageType schema, Map<String, String> extraMetaData)
{
super();
- this.schema = checkNotNull(schema, "schema");
- this.extraMetaData = Collections.unmodifiableMap(checkNotNull(extraMetaData, "extraMetaData"));
+ this.schema = Objects.requireNonNull(schema, "schema cannot be null");
+ this.extraMetaData = Collections.unmodifiableMap(Objects
+ .requireNonNull(extraMetaData, "extraMetaData cannot be null"));
}
/**
* @return the schema of the file
@@ -78,10 +80,13 @@ abstract public class WriteSupport<T> {
/**
* @param extraMetaData application specific metadata to add in the file
+ *
+ * @throws NullPointerException if extraMetaData is {@code null}
*/
public FinalizedWriteContext(Map<String, String> extraMetaData) {
super();
- this.extraMetaData = Collections.unmodifiableMap(checkNotNull(extraMetaData, "extraMetaData"));
+ this.extraMetaData = Collections.unmodifiableMap(Objects
+ .requireNonNull(extraMetaData, "extraMetaData cannot be null"));
}
/**
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/example/GroupWriteSupport.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/example/GroupWriteSupport.java
index c038f25..dfed676 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/example/GroupWriteSupport.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/example/GroupWriteSupport.java
@@ -18,21 +18,19 @@
*/
package org.apache.parquet.hadoop.example;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static org.apache.parquet.schema.MessageTypeParser.parseMessageType;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
-import org.apache.parquet.Preconditions;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.GroupWriter;
import org.apache.parquet.hadoop.api.WriteSupport;
import org.apache.parquet.io.api.RecordConsumer;
import org.apache.parquet.schema.MessageType;
-import org.apache.parquet.schema.MessageTypeParser;
public class GroupWriteSupport extends WriteSupport<Group> {
@@ -43,7 +41,7 @@ public class GroupWriteSupport extends WriteSupport<Group> {
}
public static MessageType getSchema(Configuration configuration) {
- return parseMessageType(checkNotNull(configuration.get(PARQUET_EXAMPLE_SCHEMA), PARQUET_EXAMPLE_SCHEMA));
+ return parseMessageType(Objects.requireNonNull(configuration.get(PARQUET_EXAMPLE_SCHEMA), PARQUET_EXAMPLE_SCHEMA));
}
private MessageType schema;
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/FileMetaData.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/FileMetaData.java
index 9d02bf8..ca9488f 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/FileMetaData.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/FileMetaData.java
@@ -19,10 +19,10 @@
package org.apache.parquet.hadoop.metadata;
import static java.util.Collections.unmodifiableMap;
-import static org.apache.parquet.Preconditions.checkNotNull;
import java.io.Serializable;
import java.util.Map;
+import java.util.Objects;
import org.apache.parquet.schema.MessageType;
@@ -43,11 +43,14 @@ public final class FileMetaData implements Serializable {
* @param schema the schema for the file
* @param keyValueMetaData the app specific metadata
* @param createdBy the description of the library that created the file
+ *
+ * @throws NullPointerException if schema or keyValueMetaData is {@code null}
*/
public FileMetaData(MessageType schema, Map<String, String>
keyValueMetaData, String createdBy) {
super();
- this.schema = checkNotNull(schema, "schema");
- this.keyValueMetaData = unmodifiableMap(checkNotNull(keyValueMetaData, "keyValueMetaData"));
+ this.schema = Objects.requireNonNull(schema, "schema cannot be null");
+ this.keyValueMetaData = unmodifiableMap(Objects
+ .requireNonNull(keyValueMetaData, "keyValueMetaData cannot be null"));
this.createdBy = createdBy;
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/GlobalMetaData.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/GlobalMetaData.java
index 740405d..311e323 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/GlobalMetaData.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/GlobalMetaData.java
@@ -19,11 +19,11 @@
package org.apache.parquet.hadoop.metadata;
import static java.util.Collections.unmodifiableMap;
-import static org.apache.parquet.Preconditions.checkNotNull;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
+import java.util.Objects;
import java.util.Map.Entry;
import java.util.Set;
@@ -46,11 +46,14 @@ public class GlobalMetaData implements Serializable {
* @param schema the union of the schemas for all the files
* @param keyValueMetaData the merged app specific metadata
* @param createdBy the description of the library that created the file
+ *
+ * @throws NullPointerException if schema or keyValueMetaData is {@code null}
*/
public GlobalMetaData(MessageType schema, Map<String, Set<String>>
keyValueMetaData, Set<String> createdBy) {
super();
- this.schema = checkNotNull(schema, "schema");
- this.keyValueMetaData = unmodifiableMap(checkNotNull(keyValueMetaData, "keyValueMetaData"));
+ this.schema = Objects.requireNonNull(schema, "schema cannot be null");
+ this.keyValueMetaData = unmodifiableMap(Objects
+ .requireNonNull(keyValueMetaData, "keyValueMetaData cannot be null"));
this.createdBy = createdBy;
}
diff --git
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
index 37e285d..40f12fe 100644
---
a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
+++
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/util/HadoopStreams.java
@@ -21,7 +21,6 @@ package org.apache.parquet.hadoop.util;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.parquet.Preconditions;
import org.apache.parquet.io.ParquetDecodingException;
import org.apache.parquet.io.SeekableInputStream;
import org.apache.parquet.io.PositionOutputStream;
@@ -30,6 +29,7 @@ import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
+import java.util.Objects;
/**
* Convenience methods to get Parquet abstractions for Hadoop data streams.
@@ -49,7 +49,7 @@ public class HadoopStreams {
* @return a SeekableInputStream
*/
public static SeekableInputStream wrap(FSDataInputStream stream) {
- Preconditions.checkNotNull(stream, "Cannot wrap a null input stream");
+ Objects.requireNonNull(stream, "Cannot wrap a null input stream");
if (byteBufferReadableClass != null && h2SeekableConstructor != null &&
byteBufferReadableClass.isInstance(stream.getWrappedStream())) {
try {
@@ -104,7 +104,7 @@ public class HadoopStreams {
* @return a SeekableOutputStream
*/
public static PositionOutputStream wrap(FSDataOutputStream stream) {
- Preconditions.checkNotNull(stream, "Cannot wrap a null output stream");
+ Objects.requireNonNull(stream, "Cannot wrap a null output stream");
return new HadoopPositionOutputStream(stream);
}
}
diff --git
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ConvertedField.java
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ConvertedField.java
index 8a0bbca..36ead1e 100644
--- a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ConvertedField.java
+++ b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ConvertedField.java
@@ -18,12 +18,12 @@
*/
package org.apache.parquet.thrift;
+import java.util.Objects;
+
import org.apache.parquet.ShouldNeverHappenException;
import org.apache.parquet.schema.Type;
import org.apache.parquet.thrift.projection.FieldsPath;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* This is the return value for the recursion done in {@link
ThriftSchemaConvertVisitor}
* It represents a field that has been converted from a {@link
org.apache.parquet.thrift.struct.ThriftType}
@@ -54,7 +54,7 @@ public interface ConvertedField {
private final FieldsPath path;
protected ConvertedFieldBase(FieldsPath path) {
- this.path = checkNotNull(path, "path");
+ this.path = Objects.requireNonNull(path, "path cannot be null");
}
@Override
@@ -101,7 +101,7 @@ public interface ConvertedField {
public Keep(FieldsPath path, Type type) {
super(path);
- this.type = checkNotNull(type, "type");
+ this.type = Objects.requireNonNull(type, "type cannot be null");
}
@Override
@@ -129,7 +129,7 @@ public interface ConvertedField {
public SentinelUnion(FieldsPath path, Type type) {
super(path);
- this.type = checkNotNull(type, "type");
+ this.type = Objects.requireNonNull(type, "type cannot be null");
}
@Override
diff --git
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftParquetReader.java
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftParquetReader.java
index 602d757..d5c1c74 100644
---
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftParquetReader.java
+++
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftParquetReader.java
@@ -19,6 +19,7 @@
package org.apache.parquet.thrift;
import java.io.IOException;
+import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
@@ -30,8 +31,6 @@ import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.hadoop.thrift.ThriftReadSupport;
-import static org.apache.parquet.Preconditions.checkNotNull;
-
/**
* To read a parquet file into thrift objects
* @param <T> the thrift type
@@ -95,19 +94,19 @@ public class ThriftParquetReader<T extends TBase<?,?>>
extends ParquetReader<T>
private Class<T> thriftClass;
private Builder(Path file) {
- this.file = checkNotNull(file, "file");
+ this.file = Objects.requireNonNull(file, "file cannot be null");
this.conf = new Configuration();
this.filter = FilterCompat.NOOP;
this.thriftClass = null;
}
public Builder<T> withConf(Configuration conf) {
- this.conf = checkNotNull(conf, "conf");
+ this.conf = Objects.requireNonNull(conf, "conf cannot be null");
return this;
}
public Builder<T> withFilter(Filter filter) {
- this.filter = checkNotNull(filter, "filter");
+ this.filter = Objects.requireNonNull(filter, "filter cannot be null");
return this;
}
@@ -120,7 +119,7 @@ public class ThriftParquetReader<T extends TBase<?,?>>
extends ParquetReader<T>
* @return this for method chaining
*/
public Builder<T> withThriftClass(Class<T> thriftClass) {
- this.thriftClass = checkNotNull(thriftClass, "thriftClass");
+ this.thriftClass = Objects.requireNonNull(thriftClass, "thriftClass cannot be null");
return this;
}
diff --git
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftSchemaConvertVisitor.java
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftSchemaConvertVisitor.java
index 4cd3cf5..c256880 100644
---
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftSchemaConvertVisitor.java
+++
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/ThriftSchemaConvertVisitor.java
@@ -20,6 +20,7 @@ package org.apache.parquet.thrift;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
import org.apache.parquet.ShouldNeverHappenException;
import org.apache.parquet.schema.GroupType;
@@ -52,7 +53,6 @@ import org.apache.parquet.thrift.struct.ThriftType.StringType;
import org.apache.parquet.thrift.struct.ThriftType.StructType;
import
org.apache.parquet.thrift.struct.ThriftType.StructType.StructOrUnionType;
-import static org.apache.parquet.Preconditions.checkNotNull;
import static org.apache.parquet.schema.ConversionPatterns.listType;
import static org.apache.parquet.schema.ConversionPatterns.mapType;
import static org.apache.parquet.schema.LogicalTypeAnnotation.enumType;
@@ -77,7 +77,8 @@ class ThriftSchemaConvertVisitor implements
ThriftType.StateVisitor<ConvertedFie
private final boolean keepOneOfEachUnion;
private ThriftSchemaConvertVisitor(FieldProjectionFilter
fieldProjectionFilter, boolean doProjection, boolean keepOneOfEachUnion) {
- this.fieldProjectionFilter = checkNotNull(fieldProjectionFilter, "fieldProjectionFilter");
+ this.fieldProjectionFilter = Objects.requireNonNull(fieldProjectionFilter,
+ "fieldProjectionFilter cannot be null");
this.doProjection = doProjection;
this.keepOneOfEachUnion = keepOneOfEachUnion;
}
diff --git
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
index f0b11ca..ab32ddd 100644
---
a/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
+++
b/parquet-thrift/src/main/java/org/apache/parquet/thrift/projection/deprecated/DeprecatedFieldProjectionFilter.java
@@ -20,8 +20,8 @@ package org.apache.parquet.thrift.projection.deprecated;
import java.util.LinkedList;
import java.util.List;
+import java.util.Objects;
-import org.apache.parquet.Preconditions;
import org.apache.parquet.thrift.projection.FieldsPath;
import org.apache.parquet.thrift.projection.FieldProjectionFilter;
import org.apache.parquet.thrift.projection.ThriftProjectionException;
@@ -59,7 +59,7 @@ public class DeprecatedFieldProjectionFilter implements
FieldProjectionFilter {
}
public DeprecatedFieldProjectionFilter(String filterDescStr) {
- Preconditions.checkNotNull(filterDescStr, "filterDescStr");
+ Objects.requireNonNull(filterDescStr, "filterDescStr cannot be null");
filterPatterns = new LinkedList<PathGlobPatternStatus>();