Author: nextgens
Date: 2008-09-29 07:49:04 +0000 (Mon, 29 Sep 2008)
New Revision: 22882
Removed:
trunk/freenet/test/freenet/support/compress/CompressorTest.java
Modified:
trunk/freenet/src/freenet/client/Metadata.java
trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
trunk/freenet/src/freenet/client/async/SingleFileInserter.java
trunk/freenet/src/freenet/client/async/SplitFileInserter.java
trunk/freenet/src/freenet/client/events/StartedCompressionEvent.java
trunk/freenet/src/freenet/keys/Key.java
trunk/freenet/src/freenet/node/fcp/NodeHelloMessage.java
trunk/freenet/src/freenet/node/fcp/StartedCompressionMessage.java
trunk/freenet/src/freenet/support/compress/Compressor.java
trunk/freenet/src/freenet/support/compress/GzipCompressor.java
trunk/freenet/test/freenet/support/compress/GzipCompressorTest.java
Log:
Improve the Compressor thingy: use enums where possible, fix a silly bug
preventing anything but GZIP from being parsed, ... cleanups, ...
There is still a "bug": when an unknown compression algorithm is used, we get a
"not in archive" error on fproxy; we ought to get a more precise message!
Modified: trunk/freenet/src/freenet/client/Metadata.java
===================================================================
--- trunk/freenet/src/freenet/client/Metadata.java 2008-09-28 18:25:27 UTC
(rev 22881)
+++ trunk/freenet/src/freenet/client/Metadata.java 2008-09-29 07:49:04 UTC
(rev 22882)
@@ -23,6 +23,7 @@
import freenet.support.Logger;
import freenet.support.api.Bucket;
import freenet.support.api.BucketFactory;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
import freenet.support.io.BucketTools;
@@ -76,8 +77,8 @@
/** Compressed splitfile codec */
short compressionCodec = -1;
- static public final short COMPRESS_GZIP = 0;
- static final short COMPRESS_BZIP2 = 1; // FIXME for future use
+ public static final short COMPRESS_GZIP =
COMPRESSOR_TYPE.GZIP.metadataID;
+ public static final short COMPRESS_BZIP2 = 1;
//COMPRESSOR_TYPE.BZIP2.metadataID
/** The length of the splitfile */
long dataLength;
@@ -214,7 +215,7 @@
if(compressed) {
compressionCodec = dis.readShort();
- if(compressionCodec != COMPRESS_GZIP)
+ if(!COMPRESSOR_TYPE.isValidMetadataID(compressionCodec))
throw new MetadataParseException("Unrecognized
splitfile compression codec "+compressionCodec);
decompressedLength = dis.readLong();
Modified: trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -30,7 +30,7 @@
import freenet.support.Logger;
import freenet.support.api.Bucket;
import freenet.support.compress.CompressionOutputSizeException;
-import freenet.support.compress.Compressor;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
import freenet.support.io.BucketTools;
public class SingleFileFetcher extends SimpleSingleFileFetcher {
@@ -52,7 +52,7 @@
private int recursionLevel;
/** The URI of the currently-being-processed data, for archives etc. */
private FreenetURI thisKey;
- private final LinkedList decompressors;
+ private final LinkedList<COMPRESSOR_TYPE> decompressors;
private final boolean dontTellClientGet;
private final Bucket returnBucket;
/** If true, success/failure is immediately reported to the client, and
therefore we can check TOO_MANY_PATH_COMPONENTS. */
@@ -89,7 +89,7 @@
this.recursionLevel = recursionLevel + 1;
if(recursionLevel > ctx.maxRecursionLevel)
throw new
FetchException(FetchException.TOO_MUCH_RECURSION, "Too much recursion:
"+recursionLevel+" > "+ctx.maxRecursionLevel);
- this.decompressors = new LinkedList();
+ this.decompressors = new LinkedList<COMPRESSOR_TYPE>();
}
/** Copy constructor, modifies a few given fields, don't call
schedule().
@@ -160,6 +160,7 @@
// Parse metadata
try {
metadata = Metadata.construct(data);
+ wrapHandleMetadata(false);
} catch (MetadataParseException e) {
onFailure(new FetchException(e), sched);
return;
@@ -168,7 +169,6 @@
onFailure(new
FetchException(FetchException.BUCKET_ERROR, e), sched);
return;
}
- wrapHandleMetadata(false);
}
}
@@ -186,7 +186,7 @@
if(!decompressors.isEmpty()) {
Bucket data = result.asBucket();
while(!decompressors.isEmpty()) {
- Compressor c = (Compressor)
decompressors.removeLast();
+ COMPRESSOR_TYPE c = decompressors.removeLast();
try {
long maxLen =
Math.max(ctx.maxTempLength, ctx.maxOutputLength);
data = c.decompress(data,
ctx.bucketFactory, maxLen, maxLen * 4, decompressors.isEmpty() ? returnBucket :
null);
@@ -442,7 +442,7 @@
if((redirectedKey instanceof ClientCHK) &&
!((ClientCHK)redirectedKey).isMetadata())
rcb.onBlockSetFinished(this);
if(metadata.isCompressed()) {
- Compressor codec =
Compressor.getCompressionAlgorithmByMetadataID(metadata.getCompressionCodec());
+ COMPRESSOR_TYPE codec =
COMPRESSOR_TYPE.getCompressorByMetadataID(metadata.getCompressionCodec());
f.addDecompressor(codec);
}
parent.onTransition(this, f);
@@ -478,7 +478,7 @@
// Splitfile (possibly compressed)
if(metadata.isCompressed()) {
- Compressor codec =
Compressor.getCompressionAlgorithmByMetadataID(metadata.getCompressionCodec());
+ COMPRESSOR_TYPE codec =
COMPRESSOR_TYPE.getCompressorByMetadataID(metadata.getCompressionCodec());
addDecompressor(codec);
}
@@ -539,7 +539,7 @@
return name;
}
- private void addDecompressor(Compressor codec) {
+ private void addDecompressor(COMPRESSOR_TYPE codec) {
decompressors.addLast(codec);
}
Modified: trunk/freenet/src/freenet/client/async/SingleFileInserter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SingleFileInserter.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/client/async/SingleFileInserter.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -20,7 +20,7 @@
import freenet.support.SimpleFieldSet;
import freenet.support.api.Bucket;
import freenet.support.compress.CompressionOutputSizeException;
-import freenet.support.compress.Compressor;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
import freenet.support.io.BucketChainBucketFactory;
import freenet.support.io.BucketTools;
@@ -155,21 +155,21 @@
throw new InsertException(InsertException.INVALID_URI,
"Unknown key type: "+type, null);
}
- Compressor bestCodec = null;
+ COMPRESSOR_TYPE bestCodec = null;
Bucket bestCompressedData = null;
boolean tryCompress = (origSize > blockSize) &&
(!ctx.dontCompress) && (!dontCompress);
if(tryCompress) {
+ if(logMINOR) Logger.minor(this, "Attempt to compress
the data");
// Try to compress the data.
// Try each algorithm, starting with the fastest and
weakest.
// Stop when run out of algorithms, or the compressed
data fits in a single block.
- int algos = Compressor.countCompressAlgorithms();
try {
- for(int i=0;i<algos;i++) {
+ for(COMPRESSOR_TYPE comp :
COMPRESSOR_TYPE.values()) {
+ if(logMINOR) Logger.minor(this,
"Attempt to compress using "+comp);
// Only produce if we are compressing
*the original data*
if(parent == cb)
-
ctx.eventProducer.produceEvent(new StartedCompressionEvent(i));
- Compressor comp =
Compressor.getCompressionAlgorithmByDifficulty(i);
+
ctx.eventProducer.produceEvent(new StartedCompressionEvent(comp));
Bucket result;
result = comp.compress(origData, new
BucketChainBucketFactory(ctx.persistentBucketFactory, CHKBlock.DATA_LENGTH),
origData.size());
if(result.size() <
oneBlockCompressedSize) {
@@ -199,20 +199,22 @@
}
boolean freeData = false;
if(bestCompressedData != null) {
+ long compressedSize = bestCompressedData.size();
+ if(logMINOR) Logger.minor(this, "The best compression
algorithm is "+bestCodec+ " we have a "+origSize/compressedSize+" ratio!
("+origSize+'/'+compressedSize+')');
data = bestCompressedData;
freeData = true;
}
if(parent == cb) {
if(tryCompress)
- ctx.eventProducer.produceEvent(new
FinishedCompressionEvent(bestCodec == null ? -1 :
bestCodec.codecNumberForMetadata(), origSize, data.size()));
+ ctx.eventProducer.produceEvent(new
FinishedCompressionEvent(bestCodec == null ? -1 : bestCodec.metadataID,
origSize, data.size()));
if(logMINOR) Logger.minor(this, "Compressed
"+origSize+" to "+data.size()+" on "+this);
}
// Compressed data
// Insert it...
- short codecNumber = bestCodec == null ? -1 :
bestCodec.codecNumberForMetadata();
+ short codecNumber = bestCodec == null ? -1 :
bestCodec.metadataID;
long compressedDataSize = data.size();
boolean fitsInOneBlockAsIs = bestCodec == null ?
compressedDataSize < blockSize : compressedDataSize < oneBlockCompressedSize;
boolean fitsInOneCHK = bestCodec == null ? compressedDataSize <
CHKBlock.DATA_LENGTH : compressedDataSize < CHKBlock.MAX_COMPRESSED_DATA_LENGTH;
Modified: trunk/freenet/src/freenet/client/async/SplitFileInserter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SplitFileInserter.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/client/async/SplitFileInserter.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -17,7 +17,7 @@
import freenet.support.Logger;
import freenet.support.SimpleFieldSet;
import freenet.support.api.Bucket;
-import freenet.support.compress.Compressor;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
import freenet.support.io.BucketTools;
public class SplitFileInserter implements ClientPutState {
@@ -66,7 +66,7 @@
return fs;
}
- public SplitFileInserter(BaseClientPutter put, PutCompletionCallback
cb, Bucket data, Compressor bestCodec, long decompressedLength, ClientMetadata
clientMetadata, InsertContext ctx, boolean getCHKOnly, boolean isMetadata,
Object token, boolean insertAsArchiveManifest, boolean freeData) throws
InsertException {
+ public SplitFileInserter(BaseClientPutter put, PutCompletionCallback
cb, Bucket data, COMPRESSOR_TYPE bestCodec, long decompressedLength,
ClientMetadata clientMetadata, InsertContext ctx, boolean getCHKOnly, boolean
isMetadata, Object token, boolean insertAsArchiveManifest, boolean freeData)
throws InsertException {
logMINOR = Logger.shouldLog(Logger.MINOR, this);
this.parent = put;
this.insertAsArchiveManifest = insertAsArchiveManifest;
@@ -90,7 +90,7 @@
if(bestCodec == null)
compressionCodec = -1;
else
- compressionCodec = bestCodec.codecNumberForMetadata();
+ compressionCodec = bestCodec.metadataID;
this.splitfileAlgorithm = ctx.splitfileAlgorithm;
segmentSize = ctx.splitfileSegmentDataBlocks;
checkSegmentSize = splitfileAlgorithm ==
Metadata.SPLITFILE_NONREDUNDANT ? 0 : ctx.splitfileSegmentCheckBlocks;
Modified: trunk/freenet/src/freenet/client/events/StartedCompressionEvent.java
===================================================================
--- trunk/freenet/src/freenet/client/events/StartedCompressionEvent.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/client/events/StartedCompressionEvent.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -3,21 +3,23 @@
* http://www.gnu.org/ for further details of the GPL. */
package freenet.client.events;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
+
/**
* Event indicating that we are attempting to compress the file.
*/
public class StartedCompressionEvent implements ClientEvent {
- public final int codec;
+ public final COMPRESSOR_TYPE codec;
- public StartedCompressionEvent(int codec) {
+ public StartedCompressionEvent(COMPRESSOR_TYPE codec) {
this.codec = codec;
}
- static final int code = 0x08;
+ final static int code = 0x08;
public String getDescription() {
- return "Started compression attempt with codec "+codec;
+ return "Started compression attempt with "+codec.name;
}
public int getCode() {
Modified: trunk/freenet/src/freenet/keys/Key.java
===================================================================
--- trunk/freenet/src/freenet/keys/Key.java 2008-09-28 18:25:27 UTC (rev
22881)
+++ trunk/freenet/src/freenet/keys/Key.java 2008-09-29 07:49:04 UTC (rev
22882)
@@ -19,7 +19,7 @@
import freenet.support.api.Bucket;
import freenet.support.api.BucketFactory;
import freenet.support.compress.CompressionOutputSizeException;
-import freenet.support.compress.Compressor;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
import freenet.support.io.ArrayBucket;
import freenet.support.io.ArrayBucketFactory;
import freenet.support.io.BucketTools;
@@ -141,7 +141,7 @@
(output[3] & 0xff);
if(len > maxLength)
throw new TooBigException("Invalid precompressed size: "+len +
" maxlength="+maxLength);
- Compressor decompressor =
Compressor.getCompressionAlgorithmByMetadataID(compressionAlgorithm);
+ COMPRESSOR_TYPE decompressor =
COMPRESSOR_TYPE.getCompressorByMetadataID(compressionAlgorithm);
Bucket inputBucket = new SimpleReadOnlyArrayBucket(output,
shortLength?2:4, outputLength-(shortLength?2:4));
try {
return decompressor.decompress(inputBucket, bf,
maxLength, -1, null);
@@ -184,9 +184,7 @@
} else {
if (sourceData.size() > maxCompressedDataLength) {
// Determine the best algorithm
- for (int i = 0; i <
Compressor.countCompressAlgorithms(); i++) {
- Compressor comp = Compressor
-
.getCompressionAlgorithmByDifficulty(i);
+ for (COMPRESSOR_TYPE comp :
COMPRESSOR_TYPE.values()) {
ArrayBucket compressedData;
try {
compressedData =
(ArrayBucket) comp.compress(
@@ -197,8 +195,7 @@
continue;
}
if (compressedData.size() <=
maxCompressedDataLength) {
- compressionAlgorithm =
comp
-
.codecNumberForMetadata();
+ compressionAlgorithm =
comp.metadataID;
sourceLength =
sourceData.size();
try {
cbuf =
BucketTools.toByteArray(compressedData);
Modified: trunk/freenet/src/freenet/node/fcp/NodeHelloMessage.java
===================================================================
--- trunk/freenet/src/freenet/node/fcp/NodeHelloMessage.java 2008-09-28
18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/node/fcp/NodeHelloMessage.java 2008-09-29
07:49:04 UTC (rev 22882)
@@ -42,7 +42,7 @@
sfs.put("ExtBuild", NodeStarter.extBuildNumber);
sfs.putSingle("ExtRevision", NodeStarter.extRevisionNumber);
sfs.putSingle("Testnet", Boolean.toString(node == null ? false
: node.isTestnetEnabled()));
- sfs.putSingle("CompressionCodecs",
Integer.toString(Compressor.countCompressAlgorithms()));
+ sfs.putSingle("CompressionCodecs",
Integer.toString(Compressor.COMPRESSOR_TYPE.values().length));
sfs.putSingle("ConnectionIdentifier", id);
sfs.putSingle("NodeLanguage",
L10n.getSelectedLanguage().toString());
return sfs;
Modified: trunk/freenet/src/freenet/node/fcp/StartedCompressionMessage.java
===================================================================
--- trunk/freenet/src/freenet/node/fcp/StartedCompressionMessage.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/node/fcp/StartedCompressionMessage.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -5,15 +5,16 @@
import freenet.node.Node;
import freenet.support.SimpleFieldSet;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
public class StartedCompressionMessage extends FCPMessage {
final String identifier;
final boolean global;
- final int codec;
+ final COMPRESSOR_TYPE codec;
- public StartedCompressionMessage(String identifier, boolean global, int
codec) {
+ public StartedCompressionMessage(String identifier, boolean global,
COMPRESSOR_TYPE codec) {
this.identifier = identifier;
this.codec = codec;
this.global = global;
@@ -23,7 +24,7 @@
public SimpleFieldSet getFieldSet() {
SimpleFieldSet fs = new SimpleFieldSet(true);
fs.putSingle("Identifier", identifier);
- fs.put("Codec", codec);
+ fs.putSingle("Codec", codec.name);
if(global) fs.putSingle("Global", "true");
return fs;
}
Modified: trunk/freenet/src/freenet/support/compress/Compressor.java
===================================================================
--- trunk/freenet/src/freenet/support/compress/Compressor.java 2008-09-28
18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/support/compress/Compressor.java 2008-09-29
07:49:04 UTC (rev 22882)
@@ -1,3 +1,6 @@
+/* This code is part of Freenet. It is distributed under the GNU General
+* Public License, version 2 (or at your option any later version). See
+* http://www.gnu.org/ for further details of the GPL. */
package freenet.support.compress;
import java.io.IOException;
@@ -2,3 +5,2 @@
-import freenet.client.Metadata;
import freenet.support.api.Bucket;
@@ -10,10 +12,52 @@
* A data compressor. Contains methods to get all data compressors.
* This is for single-file compression (gzip, bzip2) as opposed to archives.
*/
-public abstract class Compressor {
+public interface Compressor {
- public static final Compressor GZIP = new GzipCompressor();
+ public enum COMPRESSOR_TYPE implements Compressor {
+ // They will be tried in order: put the less resource consuming
first
+ GZIP("GZIP", new GzipCompressor(), (short) 0);
+ // BZIP2("BZIP2", new Bzip2Compressor(), (short) 1);
+
+ public final String name;
+ public final Compressor compressor;
+ public final short metadataID;
+
+ COMPRESSOR_TYPE(String name, Compressor c, short metadataID) {
+ this.name = name;
+ this.compressor = c;
+ this.metadataID = metadataID;
+ }
+
+ public static COMPRESSOR_TYPE getCompressorByMetadataID(short
id) {
+ COMPRESSOR_TYPE[] values = values();
+ for(COMPRESSOR_TYPE current : values)
+ if(current.metadataID == id)
+ return current;
+ return null;
+ }
+
+ public static boolean isValidMetadataID(short id) {
+ COMPRESSOR_TYPE[] values = values();
+ for(COMPRESSOR_TYPE current : values)
+ if(current.metadataID == id)
+ return true;
+ return false;
+ }
+ public Bucket compress(Bucket data, BucketFactory bf, long
maxLength) throws IOException, CompressionOutputSizeException {
+ return compressor.compress(data, bf, maxLength);
+ }
+
+ public Bucket decompress(Bucket data, BucketFactory
bucketFactory, long maxLength, long maxEstimateSizeLength, Bucket preferred)
throws IOException, CompressionOutputSizeException {
+ return compressor.decompress(data, bucketFactory,
maxLength, maxEstimateSizeLength, preferred);
+ }
+
+ public int decompress(byte[] dbuf, int i, int j, byte[] output)
throws CompressionOutputSizeException {
+ return compressor.decompress(dbuf, i, j, output);
+ }
+ }
+
public abstract Bucket compress(Bucket data, BucketFactory bf, long
maxLength) throws IOException, CompressionOutputSizeException;
/**
@@ -29,28 +73,6 @@
*/
public abstract Bucket decompress(Bucket data, BucketFactory
bucketFactory, long maxLength, long maxEstimateSizeLength, Bucket preferred)
throws IOException, CompressionOutputSizeException;
- public abstract short codecNumberForMetadata();
-
- /** Count the number of distinct compression algorithms currently
supported. */
- public static int countCompressAlgorithms() {
- // FIXME we presently only support gzip. This should change in
future.
- return 1;
- }
-
- public static Compressor getCompressionAlgorithmByDifficulty(int i) {
- if(i == 0)
- return GZIP;
- // FIXME when we get more compression algos, put them here.
- return null;
- }
-
- public static Compressor getCompressionAlgorithmByMetadataID(short
algo) {
- if(algo == Metadata.COMPRESS_GZIP)
- return GZIP;
- // FIXME when we get more compression algos, put them here.
- return null;
- }
-
/** Decompress in RAM only.
* @param dbuf Input buffer.
* @param i Offset to start reading from.
Modified: trunk/freenet/src/freenet/support/compress/GzipCompressor.java
===================================================================
--- trunk/freenet/src/freenet/support/compress/GzipCompressor.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/src/freenet/support/compress/GzipCompressor.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -1,6 +1,5 @@
package freenet.support.compress;
-import freenet.client.Metadata;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -13,7 +12,7 @@
import freenet.support.api.Bucket;
import freenet.support.api.BucketFactory;
-public class GzipCompressor extends Compressor {
+public class GzipCompressor implements Compressor {
@Override
public Bucket compress(Bucket data, BucketFactory bf, long maxLength)
throws IOException, CompressionOutputSizeException {
@@ -114,9 +113,4 @@
System.arraycopy(buf, 0, output, 0, bytes);
return bytes;
}
-
- @Override
- public short codecNumberForMetadata() {
- return Metadata.COMPRESS_GZIP;
- }
}
Deleted: trunk/freenet/test/freenet/support/compress/CompressorTest.java
===================================================================
--- trunk/freenet/test/freenet/support/compress/CompressorTest.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/test/freenet/support/compress/CompressorTest.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -1,52 +0,0 @@
-/*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, write to the Free Software
- * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
- */
-
-package freenet.support.compress;
-
-import junit.framework.TestCase;
-
-/**
- * Test case for {@link freenet.support.compress.Compressor} class.
- *
- * @author stuart martin <wavey at freenetproject.org>
- */
-public class CompressorTest extends TestCase {
-
- /**
- * test abstract class and accessors for logical consistency
- */
- public void testCompressor(){
-
- // force us to notice when we modify the number of supported
compressors
- int algos = Compressor.countCompressAlgorithms();
- assertEquals(1, algos);
-
- for(int i = 0; i < algos; i++){
- Compressor compressorByDifficulty =
-
Compressor.getCompressionAlgorithmByDifficulty(i); // FIXME: int vs.
short
- Compressor compressorByMetadataId =
-
Compressor.getCompressionAlgorithmByMetadataID((short)i); // FIXME: int vs.
short
-
- // check the codec number equals the index into the
algorithm list
-
assertEquals(i,compressorByDifficulty.codecNumberForMetadata());
-
- // check that the compressor obtained by difficulty
index is the same
- // as the compressor obtained by metadata id
- assertEquals(compressorByDifficulty,
compressorByMetadataId);
- }
- }
-
-}
Modified: trunk/freenet/test/freenet/support/compress/GzipCompressorTest.java
===================================================================
--- trunk/freenet/test/freenet/support/compress/GzipCompressorTest.java
2008-09-28 18:25:27 UTC (rev 22881)
+++ trunk/freenet/test/freenet/support/compress/GzipCompressorTest.java
2008-09-29 07:49:04 UTC (rev 22882)
@@ -32,7 +32,7 @@
*/
public class GzipCompressorTest extends TestCase {
- private static final String UNCOMPRESSED_DATA_1 =
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
+ public static final String UNCOMPRESSED_DATA_1 =
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"
+
"aksjdhaskjsdhaskjdhaksjdhkajsdhkasdhkqhdioqahdkashdkashdnkashdnaskdhnkasjhdnkasjhdnkasjhdnkasjhdnkasjhdnkashdnkasjhdnkasjhdnkasjhndkasjhdna"
+
"djjjjjjjjjjjjjjj3j12j312j312j312j31j23hj123niah1ia3h1iu2b321uiab31ugb312gba38gab23igb12i3ag1b2ig3bi1g3bi1gba3iu12ba3iug1bi3ug1b2i3gab1i2ua3";
@@ -47,8 +47,8 @@
* test GZIP compressor's identity and functionality
*/
public void testGzipCompressor() {
- GzipCompressor gzipCompressor = (GzipCompressor)
Compressor.GZIP;
- Compressor compressorZero =
Compressor.getCompressionAlgorithmByMetadataID((short) 0);
+ Compressor.COMPRESSOR_TYPE gzipCompressor =
Compressor.COMPRESSOR_TYPE.GZIP;
+ Compressor compressorZero =
Compressor.COMPRESSOR_TYPE.getCompressorByMetadataID((short)0);
// check GZIP is the first compressor
assertEquals(gzipCompressor, compressorZero);
@@ -94,7 +94,7 @@
int writtenBytes = 0;
try {
- writtenBytes =
Compressor.GZIP.decompress(compressedData, 0, compressedData.length,
outUncompressedData);
+ writtenBytes =
Compressor.COMPRESSOR_TYPE.GZIP.decompress(compressedData, 0,
compressedData.length, outUncompressedData);
} catch (CompressionOutputSizeException e) {
fail("unexpected exception thrown : " + e.getMessage());
}
@@ -115,7 +115,7 @@
BucketFactory factory = new ArrayBucketFactory();
try {
- Compressor.GZIP.compress(inBucket, factory, 32);
+ Compressor.COMPRESSOR_TYPE.GZIP.compress(inBucket,
factory, 32);
} catch (IOException e) {
fail("unexpected exception thrown : " + e.getMessage());
} catch (CompressionOutputSizeException e) {
@@ -137,7 +137,7 @@
BucketFactory factory = new ArrayBucketFactory();
try {
- Compressor.GZIP.decompress(inBucket, factory, 4096 +
10, 4096 + 20, null);
+ Compressor.COMPRESSOR_TYPE.GZIP.decompress(inBucket,
factory, 4096 + 10, 4096 + 20, null);
} catch (IOException e) {
fail("unexpected exception thrown : " + e.getMessage());
} catch (CompressionOutputSizeException e) {
@@ -152,7 +152,7 @@
Bucket outBucket = null;
try {
- outBucket = Compressor.GZIP.decompress(inBucket,
factory, 32768, 32768 * 2, null);
+ outBucket =
Compressor.COMPRESSOR_TYPE.GZIP.decompress(inBucket, factory, 32768, 32768 * 2,
null);
} catch (IOException e) {
fail("unexpected exception thrown : " + e.getMessage());
} catch (CompressionOutputSizeException e) {
@@ -184,7 +184,7 @@
Bucket outBucket = null;
try {
- outBucket = Compressor.GZIP.compress(inBucket, factory,
32768);
+ outBucket =
Compressor.COMPRESSOR_TYPE.GZIP.compress(inBucket, factory, 32768);
} catch (IOException e) {
fail("unexpected exception thrown : " + e.getMessage());
} catch (CompressionOutputSizeException e) {