[ https://issues.apache.org/jira/browse/HADOOP-5281?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ben Maurer updated HADOOP-5281:
-------------------------------

    Attachment: GZt.java

Sorry, the file got munged; here's the full version of the test.
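
For what it's worth, a codec-only variant of the same "use it twice in one process" pattern would look roughly like the sketch below. It is only a sketch: the class name and in-memory byte-array streams are stand-ins, there is no HDFS or SequenceFile involved, and it may well not hit the same failure, since the reported trace goes through SequenceFile and ZlibDecompressor.

{quote}
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.GzipCodec;

public class GZRoundTrip {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Exercise the codec twice in the same process, mirroring the two MapFiles in GZt.
        for (int i = 0; i < 2; i++) {
            GzipCodec codec = new GzipCodec();
            codec.setConf(conf);

            // Compress a small payload into memory.
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            CompressionOutputStream out = codec.createOutputStream(buf);
            out.write(("pass " + i).getBytes());
            out.close();

            // Decompress it again in the same pass.
            CompressionInputStream in =
                    codec.createInputStream(new ByteArrayInputStream(buf.toByteArray()));
            byte[] b = new byte[64];
            int n;
            while ((n = in.read(b)) > 0) {
                System.out.write(b, 0, n);
            }
            System.out.println();
            in.close();
        }
    }
}
{quote}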

> GzipCodec fails second time it is used in a process
> ---------------------------------------------------
>
>                 Key: HADOOP-5281
>                 URL: https://issues.apache.org/jira/browse/HADOOP-5281
>             Project: Hadoop Core
>          Issue Type: Bug
>    Affects Versions: 0.19.0
>         Environment: 0.19.0 / Linux / amd64
>            Reporter: Ben Maurer
>            Priority: Blocker
>         Attachments: GZt.java
>
>
> The following code raises:
> java.io.IOException: incorrect header check
>       at org.apache.hadoop.io.compress.zlib.ZlibDecompressor.inflateBytesDirect(Native Method)
>       at org.apache.hadoop.io.compress.zlib.ZlibDecompressor.decompress(ZlibDecompressor.java:221)
>       at org.apache.hadoop.io.compress.DecompressorStream.decompress(DecompressorStream.java:80)
>       at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:74)
>       at org.apache.hadoop.io.compress.DecompressorStream.read(DecompressorStream.java:62)
>       at java.io.DataInputStream.readByte(DataInputStream.java:248)
>       at org.apache.hadoop.io.WritableUtils.readVLong(WritableUtils.java:325)
>       at org.apache.hadoop.io.WritableUtils.readVInt(WritableUtils.java:346)
>       at org.apache.hadoop.io.SequenceFile$Reader.next(SequenceFile.java:1853)
>       at org.apache.hadoop.io.SequenceFile$Reader.next(SequenceFile.java:1876)
>       at org.apache.hadoop.io.MapFile$Reader.readIndex(MapFile.java:319)
>       at org.apache.hadoop.io.MapFile$Reader.seekInternal(MapFile.java:435)
>       at org.apache.hadoop.io.MapFile$Reader.seekInternal(MapFile.java:417)
>       at org.apache.hadoop.io.MapFile$Reader.seek(MapFile.java:404)
>       at org.apache.hadoop.io.MapFile$Reader.get(MapFile.java:523)
> {quote}
> import java.io.IOException;
>
> import org.apache.hadoop.conf.Configuration;
> import org.apache.hadoop.conf.Configured;
> import org.apache.hadoop.fs.FileSystem;
> import org.apache.hadoop.fs.Path;
> import org.apache.hadoop.io.IntWritable;
> import org.apache.hadoop.io.MapFile;
> import org.apache.hadoop.io.SequenceFile.CompressionType;
> import org.apache.hadoop.io.compress.GzipCodec;
> import org.apache.hadoop.util.Tool;
> import org.apache.hadoop.util.ToolRunner;
>
> public class GZt extends Configured implements Tool {
>     private FileSystem fileSystem;
>
>     MapFile.Writer newMap(Path p) throws IOException {
>         return new MapFile.Writer(getConf(), fileSystem, p.toString(),
>                 IntWritable.class, IntWritable.class, CompressionType.BLOCK,
>                 new GzipCodec(), null);
>     }
>
>     @Override
>     public int run(String[] args) throws Exception {
>         Path p = new Path(args[0]);
>         fileSystem = FileSystem.get(getConf());
>         fileSystem.mkdirs(p);
>
>         // Write two block-compressed, gzip'd MapFiles in the same JVM.
>         for (int i = 0; i < 2; i++) {
>             MapFile.Writer w = newMap(new Path(p, "b" + i));
>             for (int j = 0; j < 100; j++) {
>                 w.append(new IntWritable(j), new IntWritable(j));
>             }
>             w.close();
>         }
>
>         // Reading the second map back throws "incorrect header check".
>         MapFile.Reader rdr = new MapFile.Reader(fileSystem,
>                 new Path(p, "b1").toString(), getConf());
>         rdr.get(new IntWritable(1), new IntWritable());
>         return 0;
>     }
>
>     public static void main(String[] args) throws Exception {
>         int res = ToolRunner.run(new Configuration(), new GZt(), args);
>         System.exit(res);
>     }
> }
> {quote}
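
To run the attached test: GZt takes a single argument, a scratch directory on the default FileSystem. It writes the two block-compressed MapFiles (b0 and b1), and the exception above is raised from the rdr.get() call against b1.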

-- 
This message is automatically generated by JIRA.
-
You can reply to this email to add a comment to the issue online.
