This is an automated email from the ASF dual-hosted git repository. blue pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/parquet-mr.git
The following commit(s) were added to refs/heads/master by this push: new d61d221 PARQUET-1264: Fix javadoc warnings for Java 8. d61d221 is described below commit d61d221c9e752ce2cc0da65ede8b55653b3ae21f Author: Ryan Blue <b...@apache.org> AuthorDate: Fri Mar 30 17:51:23 2018 -0700 PARQUET-1264: Fix javadoc warnings for Java 8. --- .../java/org/apache/parquet/CorruptStatistics.java | 4 ++ .../org/apache/parquet/column/page/DataPageV1.java | 1 - .../org/apache/parquet/column/page/DataPageV2.java | 12 +++--- .../org/apache/parquet/column/page/PageWriter.java | 7 ++-- .../column/statistics/BinaryStatistics.java | 8 ++++ .../apache/parquet/column/values/ValuesReader.java | 2 +- .../apache/parquet/column/values/ValuesWriter.java | 2 +- .../values/bitpacking/BitPackingValuesWriter.java | 4 +- .../org/apache/parquet/filter/AndRecordFilter.java | 1 + .../apache/parquet/filter/ColumnRecordFilter.java | 1 + .../org/apache/parquet/filter/NotRecordFilter.java | 1 + .../org/apache/parquet/filter/OrRecordFilter.java | 1 + .../apache/parquet/filter/PagedRecordFilter.java | 1 + .../org/apache/parquet/filter/RecordFilter.java | 1 + .../apache/parquet/filter/UnboundRecordFilter.java | 3 ++ .../org/apache/parquet/io/MessageColumnIO.java | 7 +++- .../parquet/io/RecordConsumerLoggingWrapper.java | 2 +- .../java/org/apache/parquet/io/RecordReader.java | 1 + .../java/org/apache/parquet/io/api/Binary.java | 2 +- .../org/apache/parquet/io/api/RecordConsumer.java | 12 +++--- .../apache/parquet/schema/ConversionPatterns.java | 10 ++--- .../java/org/apache/parquet/schema/GroupType.java | 10 ++--- .../org/apache/parquet/schema/MessageType.java | 2 + .../org/apache/parquet/schema/PrimitiveType.java | 5 ++- .../main/java/org/apache/parquet/schema/Type.java | 9 +++-- .../main/java/org/apache/parquet/schema/Types.java | 7 +++- .../main/java/org/apache/parquet/Exceptions.java | 5 +++ .../java/org/apache/parquet/IOExceptionUtils.java | 2 +- .../src/main/java/org/apache/parquet/Log.java | 24 
++++++------ .../java/org/apache/parquet/Preconditions.java | 1 + .../src/main/java/org/apache/parquet/Strings.java | 12 ++++-- .../apache/parquet/bytes/ByteBufferAllocator.java | 2 + .../java/org/apache/parquet/bytes/BytesInput.java | 28 +++++++------- .../java/org/apache/parquet/bytes/BytesUtils.java | 33 +++++++++------- .../bytes/CapacityByteArrayOutputStream.java | 13 ++++++- .../parquet/bytes/LittleEndianDataInputStream.java | 44 +++++++++++----------- .../java/org/apache/parquet/glob/GlobExpander.java | 19 +++++++++- .../parquet/hadoop/metadata/Canonicalizer.java | 2 +- .../org/apache/parquet/io/SeekableInputStream.java | 6 ++- .../java/org/apache/parquet/util/DynMethods.java | 20 ++-------- .../column/values/bitpacking/BitPacking.java | 7 ++-- .../bitpacking/ByteBasedBitPackingEncoder.java | 7 ++-- .../column/values/bitpacking/BytePacker.java | 8 ++++ .../bitpacking/ByteBasedBitPackingGenerator.java | 2 +- .../apache/parquet/hadoop/codec/CodecConfig.java | 2 + .../parquet/hadoop/codec/SnappyDecompressor.java | 2 +- .../hadoop/metadata/ColumnChunkMetaData.java | 14 ++++++- .../hadoop/metadata/ColumnChunkProperties.java | 5 +++ .../parquet/hadoop/metadata/ParquetMetadata.java | 5 +-- 49 files changed, 240 insertions(+), 139 deletions(-) diff --git a/parquet-column/src/main/java/org/apache/parquet/CorruptStatistics.java b/parquet-column/src/main/java/org/apache/parquet/CorruptStatistics.java index 3e3aa3c..5724602 100644 --- a/parquet-column/src/main/java/org/apache/parquet/CorruptStatistics.java +++ b/parquet-column/src/main/java/org/apache/parquet/CorruptStatistics.java @@ -49,6 +49,10 @@ public class CorruptStatistics { /** * Decides if the statistics from a file created by createdBy (the created_by field from parquet format) * should be ignored because they are potentially corrupt. 
+ * + * @param createdBy the created-by string from a file footer + * @param columnType the type of the column that this is checking + * @return true if the statistics may be invalid and should be ignored, false otherwise */ public static boolean shouldIgnoreStatistics(String createdBy, PrimitiveTypeName columnType) { diff --git a/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV1.java b/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV1.java index 6fbfe0c..56928c3 100755 --- a/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV1.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV1.java @@ -39,7 +39,6 @@ public class DataPageV1 extends DataPage { * @param rlEncoding the repetition level encoding for this page * @param dlEncoding the definition level encoding for this page * @param valuesEncoding the values encoding for this page - * @param dlEncoding */ public DataPageV1(BytesInput bytes, int valueCount, int uncompressedSize, Statistics<?> statistics, Encoding rlEncoding, Encoding dlEncoding, Encoding valuesEncoding) { super(Ints.checkedCast(bytes.size()), uncompressedSize, valueCount); diff --git a/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV2.java b/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV2.java index 13b64c3..62dac83 100644 --- a/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV2.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/page/DataPageV2.java @@ -26,9 +26,9 @@ import org.apache.parquet.column.statistics.Statistics; public class DataPageV2 extends DataPage { /** - * @param rowCount - * @param nullCount - * @param valueCount + * @param rowCount count of rows + * @param nullCount count of nulls + * @param valueCount count of values * @param repetitionLevels RLE encoded repetition levels * @param definitionLevels RLE encoded definition levels * @param dataEncoding encoding for the data 
@@ -51,9 +51,9 @@ public class DataPageV2 extends DataPage { } /** - * @param rowCount - * @param nullCount - * @param valueCount + * @param rowCount count of rows + * @param nullCount count of nulls + * @param valueCount count of values * @param repetitionLevels RLE encoded repetition levels * @param definitionLevels RLE encoded definition levels * @param dataEncoding encoding for the data diff --git a/parquet-column/src/main/java/org/apache/parquet/column/page/PageWriter.java b/parquet-column/src/main/java/org/apache/parquet/column/page/PageWriter.java index a1d8647..1d3f7ed 100644 --- a/parquet-column/src/main/java/org/apache/parquet/column/page/PageWriter.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/page/PageWriter.java @@ -41,7 +41,7 @@ public interface PageWriter { * @param rlEncoding repetition level encoding * @param dlEncoding definition level encoding * @param valuesEncoding values encoding - * @throws IOException + * @throws IOException if there is an exception while writing page data */ void writePage(BytesInput bytesInput, int valueCount, Statistics<?> statistics, Encoding rlEncoding, Encoding dlEncoding, Encoding valuesEncoding) throws IOException; @@ -55,7 +55,7 @@ public interface PageWriter { * @param dataEncoding the encoding for the data * @param data the data encoded with dataEncoding * @param statistics optional stats for this page - * @throws IOException + * @throws IOException if there is an exception while writing page data */ void writePageV2( int rowCount, int nullCount, int valueCount, @@ -70,13 +70,14 @@ public interface PageWriter { long getMemSize(); /** - * @return the allocated size for the buffer ( > getMemSize() ) + * @return the allocated size for the buffer ( > getMemSize() ) */ long allocatedSize(); /** * writes a dictionary page * @param dictionaryPage the dictionary page containing the dictionary data + * @throws IOException if there was an exception while writing */ void writeDictionaryPage(DictionaryPage 
dictionaryPage) throws IOException; diff --git a/parquet-column/src/main/java/org/apache/parquet/column/statistics/BinaryStatistics.java b/parquet-column/src/main/java/org/apache/parquet/column/statistics/BinaryStatistics.java index 8ffb585..6746729 100644 --- a/parquet-column/src/main/java/org/apache/parquet/column/statistics/BinaryStatistics.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/statistics/BinaryStatistics.java @@ -104,6 +104,8 @@ public class BinaryStatistics extends Statistics<Binary> { } /** + * @param min_value a min binary + * @param max_value a max binary * @deprecated use {@link #updateStats(Binary)}, will be removed in 2.0.0 */ @Deprecated @@ -113,6 +115,8 @@ public class BinaryStatistics extends Statistics<Binary> { } /** + * @param min_value a min binary + * @param max_value a max binary * @deprecated use {@link #updateStats(Binary)}, will be removed in 2.0.0 */ @Deprecated @@ -133,6 +137,7 @@ public class BinaryStatistics extends Statistics<Binary> { } /** + * @return the max binary * @deprecated use {@link #genericGetMax()}, will be removed in 2.0.0 */ @Deprecated @@ -141,6 +146,7 @@ public class BinaryStatistics extends Statistics<Binary> { } /** + * @return the min binary * @deprecated use {@link #genericGetMin()}, will be removed in 2.0.0 */ @Deprecated @@ -149,6 +155,8 @@ public class BinaryStatistics extends Statistics<Binary> { } /** + * @param min a min binary + * @param max a max binary * @deprecated use {@link #updateStats(Binary)}, will be removed in 2.0.0 */ @Deprecated diff --git a/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesReader.java b/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesReader.java index b2ec2a5..315b72e 100644 --- a/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesReader.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesReader.java @@ -53,7 +53,7 @@ public abstract class ValuesReader { * @param valueCount 
count of values in this page * @param in an input stream containing the page data at the correct offset * - * @throws IOException + * @throws IOException if there is an exception while reading from the input stream */ public abstract void initFromPage(int valueCount, ByteBufferInputStream in) throws IOException; diff --git a/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesWriter.java b/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesWriter.java index 3862b02..1bebd55 100755 --- a/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesWriter.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/values/ValuesWriter.java @@ -80,7 +80,7 @@ public abstract class ValuesWriter { } /** - * ( > {@link #getBufferedSize} ) + * ( > {@link #getBufferedSize} ) * @return the allocated size of the buffer */ abstract public long getAllocatedSize(); diff --git a/parquet-column/src/main/java/org/apache/parquet/column/values/bitpacking/BitPackingValuesWriter.java b/parquet-column/src/main/java/org/apache/parquet/column/values/bitpacking/BitPackingValuesWriter.java index 08751a0..fc0f185 100644 --- a/parquet-column/src/main/java/org/apache/parquet/column/values/bitpacking/BitPackingValuesWriter.java +++ b/parquet-column/src/main/java/org/apache/parquet/column/values/bitpacking/BitPackingValuesWriter.java @@ -46,7 +46,9 @@ public class BitPackingValuesWriter extends ValuesWriter { /** * @param bound the maximum value stored by this column - * @param pageSize + * @param initialCapacity initial capacity for the writer + * @param pageSize the page size + * @param allocator a buffer allocator */ public BitPackingValuesWriter(int bound, int initialCapacity, int pageSize, ByteBufferAllocator allocator) { this.bitsPerValue = getWidthFromMaxInt(bound); diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java index 
f19119b..0c2a295 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/AndRecordFilter.java @@ -37,6 +37,7 @@ public final class AndRecordFilter implements RecordFilter { * Returns builder for creating an and filter. * @param filter1 The first filter to check. * @param filter2 The second filter to check. + * @return an unbound and filter */ public static final UnboundRecordFilter and( final UnboundRecordFilter filter1, final UnboundRecordFilter filter2 ) { Preconditions.checkNotNull( filter1, "filter1" ); diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java index e0ba607..4e048ba 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/ColumnRecordFilter.java @@ -37,6 +37,7 @@ public final class ColumnRecordFilter implements RecordFilter { * * @param columnPath Dot separated path specifier, e.g. "engine.capacity" * @param predicate Should call getBinary etc. and check the value + * @return a column filter */ public static final UnboundRecordFilter column(final String columnPath, final ColumnPredicates.Predicate predicate) { diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java index 192a7f0..1605dba 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/NotRecordFilter.java @@ -33,6 +33,7 @@ public final class NotRecordFilter implements RecordFilter { /** * Returns builder for creating an and filter. * @param filter The filter to invert. 
+ * @return a not record filter */ public static final UnboundRecordFilter not( final UnboundRecordFilter filter) { Preconditions.checkNotNull( filter, "filter" ); diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java index c1ece04..09db24a 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/OrRecordFilter.java @@ -35,6 +35,7 @@ public final class OrRecordFilter implements RecordFilter { * Returns builder for creating an and filter. * @param filter1 The first filter to check. * @param filter2 The second filter to check. + * @return an or record filter */ public static final UnboundRecordFilter or( final UnboundRecordFilter filter1, final UnboundRecordFilter filter2 ) { Preconditions.checkNotNull( filter1, "filter1" ); diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/PagedRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/PagedRecordFilter.java index 3a1891a..8aabf5c 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/PagedRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/PagedRecordFilter.java @@ -33,6 +33,7 @@ public final class PagedRecordFilter implements RecordFilter { * Returns builder for creating a paged query. * @param startPos The record to start from, numbering starts at 1. * @param pageSize The size of the page. 
+ * @return a paged record filter */ public static final UnboundRecordFilter page( final long startPos, final long pageSize ) { return new UnboundRecordFilter() { diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/RecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/RecordFilter.java index b5eece8..e336645 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/RecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/RecordFilter.java @@ -28,6 +28,7 @@ public interface RecordFilter { /** * Works out whether the current record can pass through the filter. + * @return true if the current value for the column reader matches the predicate. */ boolean isMatch(); diff --git a/parquet-column/src/main/java/org/apache/parquet/filter/UnboundRecordFilter.java b/parquet-column/src/main/java/org/apache/parquet/filter/UnboundRecordFilter.java index 5ddec24..4699980 100644 --- a/parquet-column/src/main/java/org/apache/parquet/filter/UnboundRecordFilter.java +++ b/parquet-column/src/main/java/org/apache/parquet/filter/UnboundRecordFilter.java @@ -31,6 +31,9 @@ public interface UnboundRecordFilter { /** * Call to bind to actual columns and create filter. 
+ * + * @param readers an iterable of readers to bind this filter to + * @return this unbound filter as a filter bound to the readers */ RecordFilter bind( Iterable<ColumnReader> readers); } diff --git a/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java b/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java index 7346c5a..1bec79b 100644 --- a/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/MessageColumnIO.java @@ -84,7 +84,12 @@ public class MessageColumnIO extends GroupColumnIO { } /** - * @deprecated use {@link #getRecordReader(PageReadStore, RecordMaterializer, Filter)} + * @param columns a page read store with the column data + * @param recordMaterializer a record materializer + * @param filter a record filter + * @param <T> the type of records returned by the reader + * @return a record reader + * @deprecated use getRecordReader(PageReadStore, RecordMaterializer, Filter) */ @Deprecated public <T> RecordReader<T> getRecordReader(PageReadStore columns, diff --git a/parquet-column/src/main/java/org/apache/parquet/io/RecordConsumerLoggingWrapper.java b/parquet-column/src/main/java/org/apache/parquet/io/RecordConsumerLoggingWrapper.java index b90e216..9e3e967 100644 --- a/parquet-column/src/main/java/org/apache/parquet/io/RecordConsumerLoggingWrapper.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/RecordConsumerLoggingWrapper.java @@ -40,7 +40,7 @@ public class RecordConsumerLoggingWrapper extends RecordConsumer { /** * all calls a delegate to the wrapped delegate - * @param delegate + * @param delegate a wrapped record consumer that does the real work */ public RecordConsumerLoggingWrapper(RecordConsumer delegate) { this.delegate = delegate; diff --git a/parquet-column/src/main/java/org/apache/parquet/io/RecordReader.java b/parquet-column/src/main/java/org/apache/parquet/io/RecordReader.java index a9eb2f5..64bc9c8 100644 --- 
a/parquet-column/src/main/java/org/apache/parquet/io/RecordReader.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/RecordReader.java @@ -36,6 +36,7 @@ public abstract class RecordReader<T> { /** * Returns whether the current record should be skipped (dropped) * Will be called *after* read() + * @return true if the current record should be skipped */ public boolean shouldSkipCurrentRecord() { return false; diff --git a/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java b/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java index 9f5f0f2..85c82bd 100644 --- a/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java @@ -75,7 +75,7 @@ abstract public class Binary implements Comparable<Binary>, Serializable { /** * @deprecated will be removed in 2.0.0. The comparison logic depends on the related logical type therefore this one * might not be correct. The {@link java.util.Comparator} implementation for the related type available at - * {@link Type#comparator()} shall be used instead. + * {@link org.apache.parquet.schema.PrimitiveType#comparator} should be used instead. 
*/ @Deprecated abstract public int compareTo(Binary other); diff --git a/parquet-column/src/main/java/org/apache/parquet/io/api/RecordConsumer.java b/parquet-column/src/main/java/org/apache/parquet/io/api/RecordConsumer.java index e1ab60c..a8de336 100644 --- a/parquet-column/src/main/java/org/apache/parquet/io/api/RecordConsumer.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/api/RecordConsumer.java @@ -91,37 +91,37 @@ abstract public class RecordConsumer { /** * add an int value in the current field - * @param value + * @param value an int value */ abstract public void addInteger(int value); /** * add a long value in the current field - * @param value + * @param value a long value */ abstract public void addLong(long value); /** * add a boolean value in the current field - * @param value + * @param value a boolean value */ abstract public void addBoolean(boolean value); /** * add a binary value in the current field - * @param value + * @param value a binary value */ abstract public void addBinary(Binary value); /** * add a float value in the current field - * @param value + * @param value a float value */ abstract public void addFloat(float value); /** * add a double value in the current field - * @param value + * @param value a double value */ abstract public void addDouble(double value); diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/ConversionPatterns.java b/parquet-column/src/main/java/org/apache/parquet/schema/ConversionPatterns.java index 64534e1..6db1e58 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/ConversionPatterns.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/ConversionPatterns.java @@ -35,9 +35,9 @@ public abstract class ConversionPatterns { /** * to preserve the difference between empty list and null when optional * - * @param repetition + * @param repetition repetition for the list or map * @param alias name of the field - * @param originalType + * @param originalType 
original type for the list or map * @param nested the nested repeated field * @return a group type */ @@ -92,10 +92,10 @@ public abstract class ConversionPatterns { } /** - * @param repetition + * @param repetition repetition for the list * @param alias name of the field - * @param nestedType - * @return + * @param nestedType type of elements in the list + * @return a group representing the list using a 2-level representation * @deprecated use listOfElements instead */ @Deprecated diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java index 68dba97..49c29a3 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java @@ -107,7 +107,7 @@ public class GroupType extends Type { } /** - * @param newFields + * @param newFields a list of types to use as fields in a copy of this group * @return a group with the same attributes and new fields. */ public GroupType withNewFields(List<Type> newFields) { @@ -115,7 +115,7 @@ public class GroupType extends Type { } /** - * @param newFields + * @param newFields an array of types to use as fields in a copy of this group * @return a group with the same attributes and new fields. */ public GroupType withNewFields(Type... 
newFields) { @@ -141,7 +141,7 @@ public class GroupType extends Type { /** * - * @param name + * @param name string name of a field * @return the index of the field with that name */ public int getFieldIndex(String name) { @@ -174,7 +174,7 @@ public class GroupType extends Type { } /** - * @param fieldName + * @param fieldName string name of a field * @return the type of this field by name */ public Type getType(String fieldName) { @@ -182,7 +182,7 @@ public class GroupType extends Type { } /** - * @param index + * @param index integer index of a field * @return the type of this field by index */ public Type getType(int index) { diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/MessageType.java b/parquet-column/src/main/java/org/apache/parquet/schema/MessageType.java index afbc416..943e2a0 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/MessageType.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/MessageType.java @@ -72,6 +72,7 @@ public final class MessageType extends GroupType { } /** + * @param path an array of strings representing the name path in this type * @return the max repetition level that might be needed to encode the * type at 'path'. */ @@ -80,6 +81,7 @@ public final class MessageType extends GroupType { } /** + * @param path an array of strings representing the name path in this type * @return the max repetition level that might be needed to encode the * type at 'path'. 
*/ diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/PrimitiveType.java b/parquet-column/src/main/java/org/apache/parquet/schema/PrimitiveType.java index a421173..8124906 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/PrimitiveType.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/PrimitiveType.java @@ -358,7 +358,7 @@ public final class PrimitiveType extends Type { /** * reads the value from the columnReader with the appropriate accessor and returns a String representation - * @param columnReader + * @param columnReader where to read * @return a string */ abstract public String toString(ColumnReader columnReader); @@ -719,6 +719,9 @@ public final class PrimitiveType extends Type { * Returns the {@link Type} specific comparator for properly comparing values. The natural ordering of the values * might not proper in certain cases (e.g. {@code UINT_32} requires unsigned comparison of {@code int} values while * the natural ordering is signed.) + * + * @param <T> the type of values compared by the returned PrimitiveComparator + * @return a PrimitiveComparator for values of this type */ @SuppressWarnings("unchecked") public <T> PrimitiveComparator<T> comparator() { diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/Type.java b/parquet-column/src/main/java/org/apache/parquet/schema/Type.java index dd2c38d..c34651e 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/Type.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/Type.java @@ -47,6 +47,7 @@ abstract public class Type { /** * For bean serialization, used by Cascading 3. + * @return this type's id * @deprecated use {@link #intValue()} instead. 
*/ @Deprecated @@ -110,7 +111,7 @@ abstract public class Type { ; /** - * @param other + * @param other a repetition to test * @return true if it is strictly more restrictive than other */ abstract public boolean isMoreRestrictiveThan(Repetition other); @@ -156,7 +157,7 @@ abstract public class Type { } /** - * @param id + * @param id an integer id * @return the same type with the id field set */ public abstract Type withId(int id); @@ -169,7 +170,7 @@ abstract public class Type { } /** - * @param rep + * @param rep repetition level to test * @return if repetition of the type is rep */ public boolean isRepetition(Repetition rep) { @@ -319,7 +320,9 @@ abstract public class Type { /** * + * @param path a list of groups to convert * @param converter logic to convert the tree + * @param <T> the type returned by the converter * @return the converted tree */ abstract <T> T convert(List<GroupType> path, TypeConverter<T> converter); diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/Types.java b/parquet-column/src/main/java/org/apache/parquet/schema/Types.java index 0422a9d..0a9b91f 100644 --- a/parquet-column/src/main/java/org/apache/parquet/schema/Types.java +++ b/parquet-column/src/main/java/org/apache/parquet/schema/Types.java @@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory; * Types.optional(INT32).named("number"); * </pre> * <p> - * The {@link #required(PrimitiveTypeName)} factory method produces a primitive + * The required(PrimitiveTypeName) factory method produces a primitive * type builder, and the {@link PrimitiveBuilder#named(String)} builds the * {@link PrimitiveType}. 
Between {@code required} and {@code named}, other * builder methods can be used to add type annotations or other type metadata: @@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory; * Types.optional(FIXED_LEN_BYTE_ARRAY).length(20).named("sha1"); * </pre> * <p> - * Optional types are built using {@link #optional(PrimitiveTypeName)} to get + * Optional types are built using optional(PrimitiveTypeName) to get * the builder. * <p> * Groups are built similarly, using {@code requiredGroup()} (or the optional @@ -105,6 +105,7 @@ import org.slf4j.LoggerFactory; * can be built using {@code requiredGroupValue()} (or the optionalGroupValue() * version) that returns MapGroupValue builder. * + * <pre> * // required group zipMap (MAP) { * // repeated group map (MAP_KEY_VALUE) { * // required float key @@ -629,6 +630,7 @@ public class Types { /** * Adds {@code type} as a sub-field to the group configured by this builder. * + * @param type the type to add as a field * @return this builder for additional fields. */ public THIS addField(Type type) { @@ -639,6 +641,7 @@ public class Types { /** * Adds {@code types} as sub-fields of the group configured by this builder. * + * @param types an array of types to add as fields * @return this builder for additional fields. */ public THIS addFields(Type... types) { diff --git a/parquet-common/src/main/java/org/apache/parquet/Exceptions.java b/parquet-common/src/main/java/org/apache/parquet/Exceptions.java index bdd531c..79429bd 100644 --- a/parquet-common/src/main/java/org/apache/parquet/Exceptions.java +++ b/parquet-common/src/main/java/org/apache/parquet/Exceptions.java @@ -22,6 +22,11 @@ package org.apache.parquet; public class Exceptions { /** * If the given throwable is an instance of E, throw it as an E. 
+ * + * @param t an exception instance + * @param excClass an exception class t may be an instance of + * @param <E> the type of exception that will be thrown if throwable is an instance + * @throws E if t is an instance of E */ public static <E extends Exception> void throwIfInstance(Throwable t, Class<E> excClass) diff --git a/parquet-common/src/main/java/org/apache/parquet/IOExceptionUtils.java b/parquet-common/src/main/java/org/apache/parquet/IOExceptionUtils.java index 2ac8a2b..ea1a722 100644 --- a/parquet-common/src/main/java/org/apache/parquet/IOExceptionUtils.java +++ b/parquet-common/src/main/java/org/apache/parquet/IOExceptionUtils.java @@ -27,7 +27,7 @@ import java.io.IOException; public class IOExceptionUtils { /** - * Call the #close() method on a {@see Closable}, wrapping any IOException + * Call the close method on a {@link Closeable}, wrapping any IOException * in a runtime exception. * * @param closeable - resource to close diff --git a/parquet-common/src/main/java/org/apache/parquet/Log.java b/parquet-common/src/main/java/org/apache/parquet/Log.java index e05465b..7856686 100644 --- a/parquet-common/src/main/java/org/apache/parquet/Log.java +++ b/parquet-common/src/main/java/org/apache/parquet/Log.java @@ -64,7 +64,7 @@ public class Log { /** * prints a debug message - * @param m + * @param m a log message */ public void debug(Object m) { if (m instanceof Throwable) { @@ -76,8 +76,8 @@ public class Log { /** * prints a debug message - * @param m - * @param t + * @param m a log message + * @param t a throwable error */ public void debug(Object m, Throwable t) { logger.debug(String.valueOf(m), t); @@ -85,7 +85,7 @@ public class Log { /** * prints an info message - * @param m + * @param m a log message */ public void info(Object m) { if (m instanceof Throwable) { @@ -97,8 +97,8 @@ public class Log { /** * prints an info message - * @param m - * @param t + * @param m a log message + * @param t a throwable error */ public void info(Object m, Throwable 
t) { logger.info(String.valueOf(m), t); @@ -106,7 +106,7 @@ public class Log { /** * prints a warn message - * @param m + * @param m a log message */ public void warn(Object m) { if (m instanceof Throwable) { @@ -118,8 +118,8 @@ public class Log { /** * prints a warn message - * @param m - * @param t + * @param m a log message + * @param t a throwable error */ public void warn(Object m, Throwable t) { logger.warn(String.valueOf(m), t); @@ -127,7 +127,7 @@ public class Log { /** * prints an error message - * @param m + * @param m a log message */ public void error(Object m) { if (m instanceof Throwable) { @@ -139,8 +139,8 @@ public class Log { /** * prints an error message - * @param m - * @param t + * @param m a log message + * @param t a throwable error */ public void error(Object m, Throwable t) { logger.error(String.valueOf(m), t); diff --git a/parquet-common/src/main/java/org/apache/parquet/Preconditions.java b/parquet-common/src/main/java/org/apache/parquet/Preconditions.java index 81868c7..3a14306 100644 --- a/parquet-common/src/main/java/org/apache/parquet/Preconditions.java +++ b/parquet-common/src/main/java/org/apache/parquet/Preconditions.java @@ -30,6 +30,7 @@ public final class Preconditions { /** * @param o the param to check * @param name the name of the param for the error message + * @param <T> the type of the object * @return the validated o * @throws NullPointerException if o is null */ diff --git a/parquet-common/src/main/java/org/apache/parquet/Strings.java b/parquet-common/src/main/java/org/apache/parquet/Strings.java index 644e26a..17a0456 100644 --- a/parquet-common/src/main/java/org/apache/parquet/Strings.java +++ b/parquet-common/src/main/java/org/apache/parquet/Strings.java @@ -78,6 +78,9 @@ public final class Strings { /** * Returns true if s.isEmpty() or s == null + * + * @param s a string that may be null or empty + * @return true if the string s is null or is empty */ public static boolean isNullOrEmpty(String s) { return s == null || 
s.isEmpty(); @@ -93,10 +96,10 @@ public final class Strings { * its elements with commas, and a one-of group may contain sub one-of groups. * * For example: - * start{a,b,c}end -> startaend, startbend, startcend - * start{a,{b,c},d} -> startaend, startbend, startcend, startdend - * {a,b,c} -> a, b, c - * start{a, b{x,y}} -> starta, startbx, startby + * start{a,b,c}end -> startaend, startbend, startcend + * start{a,{b,c},d} -> startaend, startbend, startcend, startdend + * {a,b,c} -> a, b, c + * start{a, b{x,y}} -> starta, startbx, startby * * @param globPattern a string in the format described above * @return a list of all the strings that would satisfy globPattern, including duplicates @@ -111,6 +114,7 @@ public final class Strings { * * @param globPattern a String to be passed to {@link #expandGlob(String)} * @param delim the delimeter used by {@link WildcardPath} + * @return a list of wildcard paths, one for each expanded result */ public static List<WildcardPath> expandGlobToWildCardPaths(String globPattern, char delim) { List<WildcardPath> ret = new ArrayList<WildcardPath>(); diff --git a/parquet-common/src/main/java/org/apache/parquet/bytes/ByteBufferAllocator.java b/parquet-common/src/main/java/org/apache/parquet/bytes/ByteBufferAllocator.java index ee36b74..af34995 100644 --- a/parquet-common/src/main/java/org/apache/parquet/bytes/ByteBufferAllocator.java +++ b/parquet-common/src/main/java/org/apache/parquet/bytes/ByteBufferAllocator.java @@ -26,6 +26,8 @@ public interface ByteBufferAllocator { /** * For RefCounted implementations using direct memory, the release method * needs to be called to free references to the allocated memory. 
+ * + * @param b a ByteBuffer */ void release(ByteBuffer b); diff --git a/parquet-common/src/main/java/org/apache/parquet/bytes/BytesInput.java b/parquet-common/src/main/java/org/apache/parquet/bytes/BytesInput.java index 1512a24..fd4986a 100644 --- a/parquet-common/src/main/java/org/apache/parquet/bytes/BytesInput.java +++ b/parquet-common/src/main/java/org/apache/parquet/bytes/BytesInput.java @@ -67,7 +67,7 @@ abstract public class BytesInput { } /** - * @param in + * @param in an input stream * @param bytes number of bytes to read * @return a BytesInput that will read that number of bytes from the stream */ @@ -76,7 +76,7 @@ abstract public class BytesInput { } /** - * @param buffers + * @param buffers an array of byte buffers * @return a BytesInput that will read the given bytes from the ByteBuffers */ public static BytesInput from(ByteBuffer... buffers) { @@ -87,7 +87,7 @@ abstract public class BytesInput { } /** - * @param buffers + * @param buffers a list of byte buffers * @return a BytesInput that will read the given bytes from the ByteBuffers */ public static BytesInput from(List<ByteBuffer> buffers) { @@ -99,7 +99,7 @@ abstract public class BytesInput { /** * - * @param in + * @param in a byte array * @return a Bytes input that will write the given bytes */ public static BytesInput from(byte[] in) { @@ -131,6 +131,7 @@ abstract public class BytesInput { /** * * @param intValue the int to write + * @return a BytesInput that contains the int value as a variable-length zig-zag encoded int */ public static BytesInput fromZigZagVarInt(int intValue) { int zigZag = (intValue << 1) ^ (intValue >> 31); @@ -148,6 +149,7 @@ abstract public class BytesInput { /** * * @param longValue the long to write + * @return a BytesInput that contains the long value as a variable-length zig-zag encoded long */ public static BytesInput fromZigZagVarLong(long longValue) { long zigZag = (longValue << 1) ^ (longValue >> 63); @@ -155,7 +157,7 @@ abstract public class BytesInput { } /**
- * @param arrayOut + * @param arrayOut a capacity byte array output stream to wrap into a BytesInput * @return a BytesInput that will write the content of the buffer */ public static BytesInput from(CapacityByteArrayOutputStream arrayOut) { @@ -179,9 +181,9 @@ abstract public class BytesInput { /** * copies the input into a new byte array - * @param bytesInput - * @return - * @throws IOException + * @param bytesInput a BytesInput + * @return a copy of the BytesInput + * @throws IOException if there is an exception when reading bytes from the BytesInput */ public static BytesInput copy(BytesInput bytesInput) throws IOException { return from(bytesInput.toByteArray()); @@ -189,15 +191,15 @@ abstract public class BytesInput { /** * writes the bytes into a stream - * @param out - * @throws IOException + * @param out an output stream + * @throws IOException if there is an exception writing */ abstract public void writeAllTo(OutputStream out) throws IOException; /** * * @return a new byte array materializing the contents of this input - * @throws IOException + * @throws IOException if there is an exception reading */ public byte[] toByteArray() throws IOException { BAOS baos = new BAOS((int)size()); @@ -209,7 +211,7 @@ abstract public class BytesInput { /** * * @return a new ByteBuffer materializing the contents of this input - * @throws IOException + * @throws IOException if there is an exception reading */ public ByteBuffer toByteBuffer() throws IOException { return ByteBuffer.wrap(toByteArray()); @@ -218,7 +220,7 @@ abstract public class BytesInput { /** * * @return a new InputStream materializing the contents of this input - * @throws IOException + * @throws IOException if there is an exception reading */ public ByteBufferInputStream toInputStream() throws IOException { return ByteBufferInputStream.wrap(toByteBuffer()); diff --git a/parquet-common/src/main/java/org/apache/parquet/bytes/BytesUtils.java 
b/parquet-common/src/main/java/org/apache/parquet/bytes/BytesUtils.java index 266685d..ce06b17 100644 --- a/parquet-common/src/main/java/org/apache/parquet/bytes/BytesUtils.java +++ b/parquet-common/src/main/java/org/apache/parquet/bytes/BytesUtils.java @@ -50,10 +50,10 @@ public class BytesUtils { /** * reads an int in little endian at the given position - * @param in - * @param offset - * @return - * @throws IOException + * @param in a byte buffer + * @param offset an offset into the byte buffer + * @return the integer at position offset read using little endian byte order + * @throws IOException if there is an exception reading from the byte buffer */ public static int readIntLittleEndian(ByteBuffer in, int offset) throws IOException { int ch4 = in.get(offset) & 0xff; @@ -65,10 +65,10 @@ public class BytesUtils { /** * reads an int in little endian at the given position - * @param in - * @param offset - * @return - * @throws IOException + * @param in a byte array + * @param offset an offset into the byte array + * @return the integer at position offset read using little endian byte order + * @throws IOException if there is an exception reading from the byte array */ public static int readIntLittleEndian(byte[] in, int offset) throws IOException { int ch4 = in[offset] & 0xff; @@ -165,6 +165,11 @@ public class BytesUtils { /** * Write a little endian int to out, using the the number of bytes required by * bit width + * @param out an output stream + * @param v an int value + * @param bitWidth bit width for padding + * @throws IOException if there is an exception while writing + * */ public static void writeIntLittleEndianPaddedOnBitWidth(OutputStream out, int v, int bitWidth) throws IOException { @@ -204,9 +209,9 @@ public class BytesUtils { /** * uses a trick mentioned in https://developers.google.com/protocol-buffers/docs/encoding to read zigZag encoded data - * @param in - * @return - * @throws IOException + * @param in an input stream + * @return the value of a 
zig-zag varint read from the current position in the stream + * @throws IOException if there is an exception while reading */ public static int readZigZagVarInt(InputStream in) throws IOException { int raw = readUnsignedVarInt(in); @@ -237,9 +242,9 @@ public class BytesUtils { /** * uses a trick mentioned in https://developers.google.com/protocol-buffers/docs/encoding to read zigZag encoded data * TODO: the implementation is compatible with readZigZagVarInt. Is there a need for different functions? - * @param in - * @return - * @throws IOException + * @param in an input stream + * @return the value of a zig-zag var-long read from the current position in the stream + * @throws IOException if there is an exception while reading */ public static long readZigZagVarLong(InputStream in) throws IOException { long raw = readUnsignedVarLong(in); diff --git a/parquet-common/src/main/java/org/apache/parquet/bytes/CapacityByteArrayOutputStream.java b/parquet-common/src/main/java/org/apache/parquet/bytes/CapacityByteArrayOutputStream.java index 92674d4..44a5623 100644 --- a/parquet-common/src/main/java/org/apache/parquet/bytes/CapacityByteArrayOutputStream.java +++ b/parquet-common/src/main/java/org/apache/parquet/bytes/CapacityByteArrayOutputStream.java @@ -81,6 +81,7 @@ public class CapacityByteArrayOutputStream extends OutputStream { * @param minSlabSize no matter what we shouldn't make slabs any smaller than this * @param targetCapacity after we've allocated targetNumSlabs how much capacity should we have? * @param targetNumSlabs how many slabs should it take to reach targetCapacity? 
+ * @return an initial slab size */ public static int initialSlabSizeHeuristic(int minSlabSize, int targetCapacity, int targetNumSlabs) { // initialSlabSize = (targetCapacity / (2^targetNumSlabs)) means we double targetNumSlabs times @@ -98,6 +99,12 @@ public class CapacityByteArrayOutputStream extends OutputStream { /** * Construct a CapacityByteArrayOutputStream configured such that its initial slab size is * determined by {@link #initialSlabSizeHeuristic}, with targetCapacity == maxCapacityHint + * + * @param minSlabSize a minimum slab size + * @param maxCapacityHint a hint for the maximum required capacity + * @param targetNumSlabs the target number of slabs + * @param allocator an allocator to use when creating byte buffers for slabs + * @return a capacity baos */ public static CapacityByteArrayOutputStream withTargetNumSlabs( int minSlabSize, int maxCapacityHint, int targetNumSlabs, ByteBufferAllocator allocator) { @@ -109,7 +116,7 @@ public class CapacityByteArrayOutputStream extends OutputStream { /** * Defaults maxCapacityHint to 1MB - * @param initialSlabSize + * @param initialSlabSize an initial slab size * @deprecated use {@link CapacityByteArrayOutputStream#CapacityByteArrayOutputStream(int, int, ByteBufferAllocator)} */ @Deprecated @@ -119,7 +126,8 @@ public class CapacityByteArrayOutputStream extends OutputStream { /** * Defaults maxCapacityHint to 1MB - * @param initialSlabSize + * @param initialSlabSize an initial slab size + * @param allocator an allocator to use when creating byte buffers for slabs * @deprecated use {@link CapacityByteArrayOutputStream#CapacityByteArrayOutputStream(int, int, ByteBufferAllocator)} */ @Deprecated @@ -140,6 +148,7 @@ public class CapacityByteArrayOutputStream extends OutputStream { /** * @param initialSlabSize the size to make the first slab * @param maxCapacityHint a hint (not guarantee) of the max amount of data written to this stream + * @param allocator an allocator to use when creating byte buffers for slabs */ 
public CapacityByteArrayOutputStream(int initialSlabSize, int maxCapacityHint, ByteBufferAllocator allocator) { checkArgument(initialSlabSize > 0, "initialSlabSize must be > 0"); diff --git a/parquet-common/src/main/java/org/apache/parquet/bytes/LittleEndianDataInputStream.java b/parquet-common/src/main/java/org/apache/parquet/bytes/LittleEndianDataInputStream.java index a092753..9a886f4 100644 --- a/parquet-common/src/main/java/org/apache/parquet/bytes/LittleEndianDataInputStream.java +++ b/parquet-common/src/main/java/org/apache/parquet/bytes/LittleEndianDataInputStream.java @@ -118,8 +118,8 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @return - * @throws IOException + * @return the next byte in the stream as an int + * @throws IOException if there is an exception while reading * @see java.io.InputStream#read() */ public int read() throws IOException { @@ -127,7 +127,7 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @return + * @return the hash code of the wrapped input stream * @see java.lang.Object#hashCode() */ public int hashCode() { @@ -135,9 +135,9 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @param b - * @return - * @throws IOException + * @param b a byte array + * @return the number of bytes read + * @throws IOException if there was an exception while reading * @see java.io.InputStream#read(byte[]) */ public int read(byte[] b) throws IOException { @@ -145,8 +145,8 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @param obj - * @return + * @param obj another object + * @return true if this is equal to the object * @see java.lang.Object#equals(java.lang.Object) */ public boolean equals(Object obj) { @@ -154,11 +154,11 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @param b - * @param off - * @param len - * @return - * @throws IOException + * @param b a byte array + * @param off an 
offset into the byte array + * @param len the length to read + * @return the number of bytes read + * @throws IOException if there was an exception while reading * @see java.io.InputStream#read(byte[], int, int) */ public int read(byte[] b, int off, int len) throws IOException { @@ -166,9 +166,9 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @param n - * @return - * @throws IOException + * @param n the number of bytes to skip + * @return the number of bytes skipped + * @throws IOException if there was an exception while reading * @see java.io.InputStream#skip(long) */ public long skip(long n) throws IOException { @@ -176,8 +176,8 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @return - * @throws IOException + * @return the number of bytes available in the wrapped stream + * @throws IOException if there was an exception while getting the number of available bytes * @see java.io.InputStream#available() */ public int available() throws IOException { @@ -185,7 +185,7 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @throws IOException + * @throws IOException if there was an exception while closing the underlying stream * @see java.io.InputStream#close() */ public void close() throws IOException { @@ -193,7 +193,7 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @param readlimit + * @param readlimit the number of bytes the mark will be valid for * @see java.io.InputStream#mark(int) */ public void mark(int readlimit) { @@ -201,7 +201,7 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @throws IOException + * @throws IOException if there is an exception while resetting the underlying stream * @see java.io.InputStream#reset() */ public void reset() throws IOException { @@ -209,7 +209,7 @@ public final class LittleEndianDataInputStream extends InputStream { } /** - * @return + * @return true if mark is 
supported * @see java.io.InputStream#markSupported() */ public boolean markSupported() { diff --git a/parquet-common/src/main/java/org/apache/parquet/glob/GlobExpander.java b/parquet-common/src/main/java/org/apache/parquet/glob/GlobExpander.java index 79b633c..e4b0a9a 100644 --- a/parquet-common/src/main/java/org/apache/parquet/glob/GlobExpander.java +++ b/parquet-common/src/main/java/org/apache/parquet/glob/GlobExpander.java @@ -33,7 +33,22 @@ public final class GlobExpander { private GlobExpander() { } /** - * See {@link org.apache.parquet.Strings#expandGlob(String)} for docs. + * Expands a string with braces ("{}") into all of its possible permutations. + * We call anything inside of {} braces a "one-of" group. + * + * The only special characters in this glob syntax are '}', '{' and ',' + * + * The top-level pattern must not contain any commas, but a "one-of" group separates + * its elements with commas, and a one-of group may contain sub one-of groups. + * + * For example: + * start{a,b,c}end -> startaend, startbend, startcend + * start{a,{b,c},d} -> startaend, startbend, startcend, startdend + * {a,b,c} -> a, b, c + * start{a, b{x,y}} -> starta, startbx, startby + * + * @param globPattern a string in the format described above + * @return a list of all the strings that would satisfy globPattern, including duplicates */ public static List<String> expand(String globPattern) { return GlobExpanderImpl.expand(GlobParser.parse(globPattern)); @@ -111,4 +126,4 @@ public final class GlobExpander { return result; } } -} \ No newline at end of file +} diff --git a/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/Canonicalizer.java b/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/Canonicalizer.java index b5c08cc..a3409da 100644 --- a/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/Canonicalizer.java +++ b/parquet-common/src/main/java/org/apache/parquet/hadoop/metadata/Canonicalizer.java @@ -26,7 +26,7 @@ import 
java.util.concurrent.ConcurrentHashMap; * this class is thread safe * @author Julien Le Dem * - * @param <T> + * @param <T> the type of values canonicalized by subclasses */ public class Canonicalizer<T> { diff --git a/parquet-common/src/main/java/org/apache/parquet/io/SeekableInputStream.java b/parquet-common/src/main/java/org/apache/parquet/io/SeekableInputStream.java index 7247817..ed20163 100644 --- a/parquet-common/src/main/java/org/apache/parquet/io/SeekableInputStream.java +++ b/parquet-common/src/main/java/org/apache/parquet/io/SeekableInputStream.java @@ -70,6 +70,8 @@ public abstract class SeekableInputStream extends InputStream { * array is full. * * @param bytes a byte array to fill with data from the stream + * @param start the starting position in the byte array for data + * @param len the length of bytes to read into the byte array * @throws IOException If the underlying stream throws IOException * @throws EOFException If the stream has fewer than {@code len} bytes left */ @@ -83,7 +85,7 @@ public abstract class SeekableInputStream extends InputStream { * returned by the method, or -1 is returned to signal that the end of the * underlying stream has been reached. * - * @param buf a byte array to fill with data from the stream + * @param buf a byte buffer to fill with data from the stream * @return the number of bytes read or -1 if the stream ended * @throws IOException If the underlying stream throws IOException */ @@ -96,7 +98,7 @@ public abstract class SeekableInputStream extends InputStream { * to copy into the buffer, or will throw {@link EOFException} if the stream * ends before the buffer is full. 
* - * @param buf a byte array to fill with data from the stream + * @param buf a byte buffer to fill with data from the stream * @throws IOException If the underlying stream throws IOException * @throws EOFException If the stream has fewer bytes left than are needed to * fill the buffer, {@code buf.remaining()} diff --git a/parquet-common/src/main/java/org/apache/parquet/util/DynMethods.java b/parquet-common/src/main/java/org/apache/parquet/util/DynMethods.java index 769f31c..e4f025d 100644 --- a/parquet-common/src/main/java/org/apache/parquet/util/DynMethods.java +++ b/parquet-common/src/main/java/org/apache/parquet/util/DynMethods.java @@ -232,8 +232,6 @@ public class DynMethods { * @param methodName name of a method (different from constructor) * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder impl(String className, String methodName, Class<?>... argClasses) { // don't do any work if an implementation has been found @@ -258,8 +256,6 @@ public class DynMethods { * @param className name of a class * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder impl(String className, Class<?>... argClasses) { impl(className, name, argClasses); @@ -269,11 +265,10 @@ public class DynMethods { /** * Checks for a method implementation. * + * @param targetClass the class to check for an implementation * @param methodName name of a method (different from constructor) * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder impl(Class<?> targetClass, String methodName, Class<?>... 
argClasses) { // don't do any work if an implementation has been found @@ -295,10 +290,9 @@ public class DynMethods { * * The name passed to the constructor is the method name used. * + * @param targetClass the class to check for an implementation * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder impl(Class<?> targetClass, Class<?>... argClasses) { impl(targetClass, name, argClasses); @@ -344,8 +338,6 @@ public class DynMethods { * @param methodName name of a method (different from constructor) * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder hiddenImpl(String className, String methodName, Class<?>... argClasses) { // don't do any work if an implementation has been found @@ -370,8 +362,6 @@ public class DynMethods { * @param className name of a class * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder hiddenImpl(String className, Class<?>... argClasses) { hiddenImpl(className, name, argClasses); @@ -381,11 +371,10 @@ public class DynMethods { /** * Checks for a method implementation. * + * @param targetClass the class to check for an implementation * @param methodName name of a method (different from constructor) * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder hiddenImpl(Class<?> targetClass, String methodName, Class<?>... 
argClasses) { // don't do any work if an implementation has been found @@ -410,10 +399,9 @@ public class DynMethods { * * The name passed to the constructor is the method name used. * + * @param targetClass the class to check for an implementation * @param argClasses argument classes for the method * @return this Builder for method chaining - * @see {@link java.lang.Class#forName(String)} - * @see {@link java.lang.Class#getMethod(String, Class[])} */ public Builder hiddenImpl(Class<?> targetClass, Class<?>... argClasses) { hiddenImpl(targetClass, name, argClasses); diff --git a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BitPacking.java b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BitPacking.java index 7b557c7..3e73fc7 100755 --- a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BitPacking.java +++ b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BitPacking.java @@ -46,13 +46,13 @@ public class BitPacking { /** * will write the bits to the underlying stream aligned on the buffer size * @param val the value to encode - * @throws IOException + * @throws IOException if there is an exception while writing */ abstract public void write(int val) throws IOException; /** * will flush the buffer to the underlying stream (and pad with 0s) - * @throws IOException + * @throws IOException if there is an exception while finishing */ abstract public void finish() throws IOException; } @@ -68,7 +68,7 @@ public class BitPacking { /** * * @return and int decoded from the underlying stream - * @throws IOException + * @throws IOException if there is an exception while reading */ abstract public int read() throws IOException; } @@ -110,6 +110,7 @@ public class BitPacking { * * @param bitLength the width in bits of the integers to read * @param in the stream to read the bytes from + * @param valueCount the number of values to be read from the stream * @return the correct implementation for the width */ public
static BitPackingReader createBitPackingReader(int bitLength, InputStream in, long valueCount) { diff --git a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/ByteBasedBitPackingEncoder.java b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/ByteBasedBitPackingEncoder.java index 0bc8b30..1ce1e6a 100644 --- a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/ByteBasedBitPackingEncoder.java +++ b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/ByteBasedBitPackingEncoder.java @@ -55,6 +55,7 @@ public class ByteBasedBitPackingEncoder { /** * @param bitWidth the number of bits used to encode an int + * @param packer factory for bit packing implementations */ public ByteBasedBitPackingEncoder(int bitWidth, Packer packer) { this.bitWidth = bitWidth; @@ -68,9 +69,9 @@ public class ByteBasedBitPackingEncoder { /** * writes an int using the requested number of bits. - * accepts only value < 2^bitWidth + * accepts only values less than 2^bitWidth * @param value the value to write - * @throws IOException + * @throws IOException if there is an exception while writing */ public void writeInt(int value) throws IOException { input[inputSize] = value; @@ -102,7 +103,7 @@ public class ByteBasedBitPackingEncoder { /** * @return the bytes representing the packed values - * @throws IOException + * @throws IOException if there is an exception while creating the BytesInput */ public BytesInput toBytes() throws IOException { int packedByteLength = packedPosition + BytesUtils.paddedByteCountFromBits(inputSize * bitWidth); diff --git a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BytePacker.java b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BytePacker.java index 675576c..634f5b0 100644 --- a/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BytePacker.java +++ 
b/parquet-encoding/src/main/java/org/apache/parquet/column/values/bitpacking/BytePacker.java @@ -77,6 +77,10 @@ public abstract class BytePacker { /** * Compatibility API + * @param input the input bytes + * @param inPos where to read from in input + * @param output the output values + * @param outPos where to write to in output */ @Deprecated public void unpack8Values(final byte[] input, final int inPos, final int[] output, final int outPos) { @@ -95,6 +99,10 @@ public abstract class BytePacker { /** * Compatibility API + * @param input the input bytes + * @param inPos where to read from in input + * @param output the output values + * @param outPos where to write to in output */ @Deprecated public void unpack32Values(byte[] input, int inPos, int[] output, int outPos) { diff --git a/parquet-generator/src/main/java/org/apache/parquet/encoding/bitpacking/ByteBasedBitPackingGenerator.java b/parquet-generator/src/main/java/org/apache/parquet/encoding/bitpacking/ByteBasedBitPackingGenerator.java index 6b72592..842f41f 100644 --- a/parquet-generator/src/main/java/org/apache/parquet/encoding/bitpacking/ByteBasedBitPackingGenerator.java +++ b/parquet-generator/src/main/java/org/apache/parquet/encoding/bitpacking/ByteBasedBitPackingGenerator.java @@ -76,8 +76,8 @@ public class ByteBasedBitPackingGenerator { fw.append(" * Packs from the Least Significant Bit first\n"); } fw.append(" * \n"); + fw.append(" * See ByteBasedBitPackingGenerator to make changes to this file\n"); fw.append(" * @author automatically generated\n"); - fw.append(" * @see ByteBasedBitPackingGenerator\n"); fw.append(" *\n"); fw.append(" */\n"); fw.append("public abstract class " + className + " {\n"); diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java index e0907f9..4530abc 100644 --- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java +++ 
b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/CodecConfig.java @@ -58,6 +58,7 @@ public abstract class CodecConfig { /** * use mapred api to read codec config + * @param jobConf a mapred job configuration * @return MapredCodecConfig */ public static CodecConfig from(JobConf jobConf) { @@ -66,6 +67,7 @@ public abstract class CodecConfig { /** * use mapreduce api to read codec config + * @param context a mapreduce task attempt context * @return MapreduceCodecConfig */ public static CodecConfig from(TaskAttemptContext context) { diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/SnappyDecompressor.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/SnappyDecompressor.java index 66e3b81..190f8d5 100644 --- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/SnappyDecompressor.java +++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/codec/SnappyDecompressor.java @@ -45,7 +45,7 @@ public class SnappyDecompressor implements Decompressor { * @param off Start offset of the data * @param len Size of the buffer * @return The actual number of bytes of uncompressed data. 
- * @throws IOException + * @throws IOException if reading or decompression fails */ @Override public synchronized int decompress(byte[] buffer, int off, int len) throws IOException { diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkMetaData.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkMetaData.java index e198698..562bcad 100644 --- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkMetaData.java +++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkMetaData.java @@ -68,6 +68,18 @@ abstract public class ColumnChunkMetaData { } /** + * @param path the path of this column in the write schema + * @param type primitive type for this column + * @param codec the compression codec used to compress + * @param encodingStats EncodingStats for the encodings used in this column + * @param encodings a set of encodings used in this column + * @param statistics statistics for the data in this column + * @param firstDataPage offset of the first non-dictionary page + * @param dictionaryPageOffset offset of the dictionary page + * @param valueCount number of values + * @param totalSize total compressed size + * @param totalUncompressedSize uncompressed data size + * @return a column chunk metadata instance * @deprecated will be removed in 2.0.0. Use * {@link #get(ColumnPath, PrimitiveType, CompressionCodecName, EncodingStats, Set, Statistics, long, long, long, long, long)} * instead. @@ -145,7 +157,7 @@ abstract public class ColumnChunkMetaData { /** * checks that a positive long value fits in an int.
* (reindexed on Integer.MIN_VALUE) - * @param value + * @param value a long value * @return whether it fits */ protected static boolean positiveLongFitsInAnInt(long value) { diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkProperties.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkProperties.java index 233cf94..55ec7c7 100644 --- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkProperties.java +++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ColumnChunkProperties.java @@ -31,6 +31,11 @@ public class ColumnChunkProperties { private static Canonicalizer<ColumnChunkProperties> properties = new Canonicalizer<ColumnChunkProperties>(); /** + * @param path the path of this column in the write schema + * @param type the primitive type of this column + * @param codec the compression codec used for this column + * @param encodings a set of encodings used by this column + * @return column chunk properties * @deprecated will be removed in 2.0.0. Use {@link #get(ColumnPath, PrimitiveType, CompressionCodecName, Set)} * instead. 
*/ diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ParquetMetadata.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ParquetMetadata.java index cb6af54..3ee61cd 100755 --- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ParquetMetadata.java +++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/metadata/ParquetMetadata.java @@ -42,8 +42,7 @@ public class ParquetMetadata { private static final ObjectMapper objectMapper = new ObjectMapper(); /** - * - * @param parquetMetaData + * @param parquetMetaData an instance of parquet metadata to convert * @return the json representation */ public static String toJSON(ParquetMetadata parquetMetaData) { @@ -52,7 +51,7 @@ public class ParquetMetadata { /** * - * @param parquetMetaData + * @param parquetMetaData an instance of parquet metadata to convert * @return the pretty printed json representation */ public static String toPrettyJSON(ParquetMetadata parquetMetaData) { -- To stop receiving notification emails like this one, please contact b...@apache.org.