[ https://issues.apache.org/jira/browse/HIVE-3874?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=13594521#comment-13594521 ]

Hudson commented on HIVE-3874:
------------------------------

Integrated in Hive-trunk-h0.21 #2002 (See [https://builds.apache.org/job/Hive-trunk-h0.21/2002/])
    HIVE-3874. Create a new Optimized Row Columnar file format for Hive. (Owen O'Malley via kevinwilfong) (Revision 1452992)

     Result = SUCCESS
kevinwilfong : http://svn.apache.org/viewcvs.cgi/?root=Apache-SVN&view=rev&rev=1452992
Files : 
* /hive/trunk/build.properties
* /hive/trunk/build.xml
* /hive/trunk/ivy/libraries.properties
* /hive/trunk/ql/build.xml
* /hive/trunk/ql/ivy.xml
* /hive/trunk/ql/src/gen/protobuf
* /hive/trunk/ql/src/gen/protobuf/gen-java
* /hive/trunk/ql/src/gen/protobuf/gen-java/org
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc
* /hive/trunk/ql/src/gen/protobuf/gen-java/org/apache/hadoop/hive/ql/io/orc/OrcProto.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/BitFieldReader.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/BitFieldWriter.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/BooleanColumnStatistics.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatistics.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ColumnStatisticsImpl.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionCodec.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/CompressionKind.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/DoubleColumnStatistics.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/DynamicByteArray.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/DynamicIntArray.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/InStream.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/IntegerColumnStatistics.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcInputFormat.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcOutputFormat.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcStruct.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcUnion.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/PositionProvider.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/PositionRecorder.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/PositionedOutputStream.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Reader.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ReaderImpl.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReader.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RedBlackTree.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthByteReader.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthByteWriter.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerReader.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RunLengthIntegerWriter.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SerializationUtils.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/SnappyCodec.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/StreamName.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/StringColumnStatistics.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/StringRedBlackTree.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/StripeInformation.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/Writer.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/WriterImpl.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/ZlibCodec.java
* /hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/package-info.java
* /hive/trunk/ql/src/protobuf
* /hive/trunk/ql/src/protobuf/org
* /hive/trunk/ql/src/protobuf/org/apache
* /hive/trunk/ql/src/protobuf/org/apache/hadoop
* /hive/trunk/ql/src/protobuf/org/apache/hadoop/hive
* /hive/trunk/ql/src/protobuf/org/apache/hadoop/hive/ql
* /hive/trunk/ql/src/protobuf/org/apache/hadoop/hive/ql/io
* /hive/trunk/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc
* /hive/trunk/ql/src/protobuf/org/apache/hadoop/hive/ql/io/orc/orc_proto.proto
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestBitFieldReader.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestDynamicArray.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestFileDump.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInStream.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcFile.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestOrcStruct.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthByteReader.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestRunLengthIntegerReader.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestSerializationUtils.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestStreamName.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestStringRedBlackTree.java
* /hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestZlib.java
* /hive/trunk/ql/src/test/resources/orc-file-dump.out

> Create a new Optimized Row Columnar file format for Hive
> --------------------------------------------------------
>
>                 Key: HIVE-3874
>                 URL: https://issues.apache.org/jira/browse/HIVE-3874
>             Project: Hive
>          Issue Type: Improvement
>          Components: Serializers/Deserializers
>            Reporter: Owen O'Malley
>            Assignee: Owen O'Malley
>         Attachments: hive.3874.2.patch, HIVE-3874.D8529.1.patch, 
> HIVE-3874.D8529.2.patch, HIVE-3874.D8529.3.patch, HIVE-3874.D8529.4.patch, 
> HIVE-3874.D8871.1.patch, OrcFileIntro.pptx, orc.tgz
>
>
> There are several limitations of the current RC File format that I'd like to 
> address by creating a new format:
> * each column value is stored as a binary blob, which means:
> ** the entire column value must be read, decompressed, and deserialized
> ** the file format can't use smarter type-specific compression
> ** push-down filters can't be evaluated
> * the start of each row group needs to be found by scanning
> * user metadata can only be added to the file when the file is created
> * the file doesn't store the number of rows per file or row group
> * there is no mechanism for seeking to a particular row number, which is 
> required for external indexes.
> * there is no mechanism for storing lightweight indexes within the file to 
> enable push-down filters to skip entire row groups.
> * the types of the rows aren't stored in the file

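For reference, below is a minimal sketch of how the writer and reader classes added in this revision (OrcFile, Writer, Reader, RecordReader, CompressionKind) might be used from Java. The method signatures are assumptions inferred from the class names in the file list above, not taken verbatim from the committed code, so they may differ in detail:

// Hedged sketch: round-trips a couple of rows through an ORC file using the
// assumed org.apache.hadoop.hive.ql.io.orc API from this commit.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.CompressionKind;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;
import org.apache.hadoop.hive.ql.io.orc.Writer;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;

public class OrcRoundTrip {
  // Simple row type; a reflection-based ObjectInspector describes its schema to the writer.
  static class Row {
    int x;
    String y;
    Row(int x, String y) { this.x = x; this.y = y; }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("/tmp/example.orc");

    ObjectInspector inspector = ObjectInspectorFactory.getReflectionObjectInspector(
        Row.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA);

    // Assumed parameters: (fs, path, inspector, stripeSize, compression, bufferSize, rowIndexStride)
    Writer writer = OrcFile.createWriter(fs, path, inspector,
        256 * 1024 * 1024, CompressionKind.ZLIB, 256 * 1024, 10000);
    writer.addRow(new Row(1, "hello"));
    writer.addRow(new Row(2, "world"));
    writer.close();

    // Reading back; the boolean[] argument selects which columns to read (null = all),
    // which is where column projection comes in.
    Reader reader = OrcFile.createReader(fs, path);
    RecordReader rows = reader.rows(null);
    Object row = null;
    while (rows.hasNext()) {
      row = rows.next(row);   // rows come back as OrcStruct instances
      System.out.println(row);
    }
    rows.close();
  }
}

The stripe size and row index stride parameters in the sketch correspond to the goals in the description above: per-stripe/row-group statistics and lightweight indexes so readers can seek to a row number and skip row groups that a push-down filter rules out.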