Author: omalley
Date: Thu Jan 8 13:48:14 2009
New Revision: 732838
URL: http://svn.apache.org/viewvc?rev=732838&view=rev
Log:
HADOOP-4950. Make the CompressorStream, DecompressorStream,
BlockCompressorStream, and BlockDecompressorStream public to facilitate
non-Hadoop codecs. (omalley)
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockCompressorStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockDecompressorStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressorStream.java
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/DecompressorStream.java
Modified: hadoop/core/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=732838&r1=732837&r2=732838&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Jan 8 13:48:14 2009
@@ -336,6 +336,10 @@
HADOOP-4916. Make user/location of Chukwa installation configurable by an
external properties file. (Eric Yang via cdouglas)
+ HADOOP-4950. Make the CompressorStream, DecompressorStream,
+ BlockCompressorStream, and BlockDecompressorStream public to facilitate
+ non-Hadoop codecs. (omalley)
+
OPTIMIZATIONS
HADOOP-3293. Fixes FileInputFormat to do provide locations for splits
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockCompressorStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockCompressorStream.java?rev=732838&r1=732837&r2=732838&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockCompressorStream.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockCompressorStream.java
Thu Jan 8 13:48:14 2009
@@ -31,7 +31,7 @@
* {@link org.apache.hadoop.io.compress.Compressor} requires buffering to
* effect meaningful compression, it is responsible for it.
*/
-class BlockCompressorStream extends CompressorStream {
+public class BlockCompressorStream extends CompressorStream {
// The 'maximum' size of input data to be compressed, to account
// for the overhead of the compression algorithm.
@@ -137,7 +137,7 @@
}
}
- void compress() throws IOException {
+ protected void compress() throws IOException {
int len = compressor.compress(buffer, 0, buffer.length);
if (len > 0) {
// Write out the compressed chunk
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockDecompressorStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockDecompressorStream.java?rev=732838&r1=732837&r2=732838&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockDecompressorStream.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/BlockDecompressorStream.java
Thu Jan 8 13:48:14 2009
@@ -28,7 +28,7 @@
* 'stream-based' compression algorithms.
*
*/
-class BlockDecompressorStream extends DecompressorStream {
+public class BlockDecompressorStream extends DecompressorStream {
private int originalBlockSize = 0;
private int noUncompressedBytes = 0;
@@ -58,7 +58,7 @@
super(in);
}
- int decompress(byte[] b, int off, int len) throws IOException {
+ protected int decompress(byte[] b, int off, int len) throws IOException {
// Check if we are the beginning of a block
if (noUncompressedBytes == originalBlockSize) {
// Get original data size
@@ -89,7 +89,7 @@
return n;
}
- void getCompressedData() throws IOException {
+ protected void getCompressedData() throws IOException {
checkStream();
// Get the size of the compressed chunk
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressorStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressorStream.java?rev=732838&r1=732837&r2=732838&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressorStream.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressorStream.java
Thu Jan 8 13:48:14 2009
@@ -24,10 +24,10 @@
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
-class CompressorStream extends CompressionOutputStream {
- Compressor compressor;
- byte[] buffer;
- boolean closed = false;
+public class CompressorStream extends CompressionOutputStream {
+ protected Compressor compressor;
+ protected byte[] buffer;
+ protected boolean closed = false;
public CompressorStream(OutputStream out, Compressor compressor, int
bufferSize) {
super(out);
@@ -72,7 +72,7 @@
}
}
- void compress() throws IOException {
+ protected void compress() throws IOException {
int len = compressor.compress(buffer, 0, buffer.length);
if (len > 0) {
out.write(buffer, 0, len);
@@ -100,7 +100,7 @@
}
}
- byte[] oneByte = new byte[1];
+ private byte[] oneByte = new byte[1];
public void write(int b) throws IOException {
oneByte[0] = (byte)(b & 0xff);
write(oneByte, 0, oneByte.length);
Modified:
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/DecompressorStream.java
URL:
http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/DecompressorStream.java?rev=732838&r1=732837&r2=732838&view=diff
==============================================================================
---
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/DecompressorStream.java
(original)
+++
hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/DecompressorStream.java
Thu Jan 8 13:48:14 2009
@@ -24,11 +24,11 @@
import org.apache.hadoop.io.compress.Decompressor;
-class DecompressorStream extends CompressionInputStream {
- Decompressor decompressor = null;
- byte[] buffer;
- boolean eof = false;
- boolean closed = false;
+public class DecompressorStream extends CompressionInputStream {
+ protected Decompressor decompressor = null;
+ protected byte[] buffer;
+ protected boolean eof = false;
+ protected boolean closed = false;
public DecompressorStream(InputStream in, Decompressor decompressor, int
bufferSize) {
super(in);
@@ -56,7 +56,7 @@
super(in);
}
- byte[] oneByte = new byte[1];
+ private byte[] oneByte = new byte[1];
public int read() throws IOException {
checkStream();
return (read(oneByte, 0, oneByte.length) == -1) ? -1 : (oneByte[0] & 0xff);
@@ -74,7 +74,7 @@
return decompress(b, off, len);
}
- int decompress(byte[] b, int off, int len) throws IOException {
+ protected int decompress(byte[] b, int off, int len) throws IOException {
int n = 0;
while ((n = decompressor.decompress(b, off, len)) == 0) {
@@ -90,7 +90,7 @@
return n;
}
- void getCompressedData() throws IOException {
+ protected void getCompressedData() throws IOException {
checkStream();
int n = in.read(buffer, 0, buffer.length);
@@ -101,7 +101,7 @@
decompressor.setInput(buffer, 0, n);
}
- void checkStream() throws IOException {
+ protected void checkStream() throws IOException {
if (closed) {
throw new IOException("Stream closed");
}
@@ -111,7 +111,7 @@
decompressor.reset();
}
- byte[] skipBytes = new byte[512];
+ private byte[] skipBytes = new byte[512];
public long skip(long n) throws IOException {
// Sanity checks
if (n < 0) {