Author: toad
Date: 2006-10-31 17:40:58 +0000 (Tue, 31 Oct 2006)
New Revision: 10757

Modified:
   trunk/freenet/src/freenet/client/async/SingleFileInserter.java
   trunk/freenet/src/freenet/keys/CHKBlock.java
   trunk/freenet/src/freenet/keys/ClientCHKBlock.java
   trunk/freenet/src/freenet/keys/InsertableClientSSK.java
   trunk/freenet/src/freenet/keys/Key.java
   trunk/freenet/src/freenet/keys/SSKBlock.java
Log:
Fix bug related to compressed data being too big to fit into a block after 
including the length of the original data.

Modified: trunk/freenet/src/freenet/client/async/SingleFileInserter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SingleFileInserter.java      2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/client/async/SingleFileInserter.java      2006-10-31 17:40:58 UTC (rev 10757)
@@ -130,14 +130,17 @@
                Bucket origData = block.getData();
                Bucket data = origData;
                int blockSize;
+               int oneBlockCompressedSize;
                boolean dontCompress = ctx.dontCompress;

                long origSize = data.size();
                String type = block.desiredURI.getKeyType().toUpperCase();
                if(type.equals("SSK") || type.equals("KSK") || type.equals("USK")) {
                        blockSize = SSKBlock.DATA_LENGTH;
+                       oneBlockCompressedSize = SSKBlock.MAX_COMPRESSED_DATA_LENGTH;
                } else if(type.equals("CHK")) {
                        blockSize = CHKBlock.DATA_LENGTH;
+                       oneBlockCompressedSize = CHKBlock.MAX_COMPRESSED_DATA_LENGTH;
                } else {
                        throw new InserterException(InserterException.INVALID_URI, "Unknown key type: "+type, null);
                }
@@ -157,8 +160,8 @@
                                                ctx.eventProducer.produceEvent(new StartedCompressionEvent(i));
                                        Compressor comp = Compressor.getCompressionAlgorithmByDifficulty(i);
                                        Bucket result;
-                                       result = comp.compress(origData, ctx.persistentBucketFactory, Long.MAX_VALUE);
-                                       if(result.size() < blockSize) {
+                                       result = comp.compress(origData, ctx.persistentBucketFactory, origData.size());
+                                       if(result.size() < oneBlockCompressedSize) {
                                                bestCodec = comp;
                                                data = result;
                                                if(bestCompressedData != null)
@@ -194,13 +197,16 @@

                // Insert it...
                short codecNumber = bestCodec == null ? -1 : bestCodec.codecNumberForMetadata();
+               long compressedDataSize = data.size();
+               boolean fitsInOneBlockAsIs = bestCodec == null ? compressedDataSize < blockSize : compressedDataSize < oneBlockCompressedSize;
+               boolean fitsInOneCHK = bestCodec == null ? compressedDataSize < CHKBlock.DATA_LENGTH : compressedDataSize < CHKBlock.MAX_COMPRESSED_DATA_LENGTH;

                if(block.getData().size() > Integer.MAX_VALUE)
                        throw new InserterException(InserterException.INTERNAL_ERROR, "2GB+ should not encode to one block!", null);

                boolean noMetadata = ((block.clientMetadata == null) || block.clientMetadata.isTrivial()) && targetFilename == null;
                if(noMetadata && !insertAsArchiveManifest) {
-                       if(data.size() < blockSize) {
+                       if(fitsInOneBlockAsIs) {
                                // Just insert it
                                ClientPutState bi =
                                        createInserter(parent, data, codecNumber, block.desiredURI, ctx, cb, metadata, (int)block.getData().size(), -1, getCHKOnly, true, true);
@@ -210,7 +216,7 @@
                                return;
                        }
                }
-               if (data.size() < ClientCHKBlock.MAX_COMPRESSED_DATA_LENGTH) {
+               if (fitsInOneCHK) {
                        // Insert single block, then insert pointer to it
                        if(reportMetadataOnly) {
                                SingleBlockInserter dataPutter = new SingleBlockInserter(parent, data, codecNumber, FreenetURI.EMPTY_CHK_URI, ctx, cb, metadata, (int)origSize, -1, getCHKOnly, true, true, token);

Modified: trunk/freenet/src/freenet/keys/CHKBlock.java
===================================================================
--- trunk/freenet/src/freenet/keys/CHKBlock.java        2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/keys/CHKBlock.java        2006-10-31 17:40:58 UTC (rev 10757)
@@ -19,6 +19,8 @@
     public static final int MAX_LENGTH_BEFORE_COMPRESSION = Integer.MAX_VALUE;
     public static final int TOTAL_HEADERS_LENGTH = 36;
     public static final int DATA_LENGTH = 32768;
+    /* Maximum length of compressed payload */
+       public static final int MAX_COMPRESSED_DATA_LENGTH = DATA_LENGTH - 4;

     public String toString() {
         return super.toString()+": chk="+chk;

Modified: trunk/freenet/src/freenet/keys/ClientCHKBlock.java
===================================================================
--- trunk/freenet/src/freenet/keys/ClientCHKBlock.java  2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/keys/ClientCHKBlock.java  2006-10-31 17:40:58 UTC (rev 10757)
@@ -26,7 +26,6 @@
  */
 public class ClientCHKBlock extends CHKBlock implements ClientKeyBlock {

-    public static final long MAX_COMPRESSED_DATA_LENGTH = NodeCHK.BLOCK_SIZE - 4;
        final ClientCHK key;

     public String toString() {
@@ -134,7 +133,7 @@
         ClientCHK key;
         short compressionAlgorithm = -1;
         try {
-                       Compressed comp = Key.compress(sourceData, dontCompress, alreadyCompressedCodec, sourceLength, MAX_LENGTH_BEFORE_COMPRESSION, MAX_COMPRESSED_DATA_LENGTH, false);
+                       Compressed comp = Key.compress(sourceData, dontCompress, alreadyCompressedCodec, sourceLength, MAX_LENGTH_BEFORE_COMPRESSION, CHKBlock.MAX_COMPRESSED_DATA_LENGTH, false);
                        finalData = comp.compressedData;
                        compressionAlgorithm = comp.compressionAlgorithm;
                } catch (KeyEncodeException e2) {

Modified: trunk/freenet/src/freenet/keys/InsertableClientSSK.java
===================================================================
--- trunk/freenet/src/freenet/keys/InsertableClientSSK.java     2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/keys/InsertableClientSSK.java     2006-10-31 17:40:58 UTC (rev 10757)
@@ -73,6 +73,9 @@
             byte[] digest = md256.digest();
             MersenneTwister mt = new MersenneTwister(digest);
             data = new byte[SSKBlock.DATA_LENGTH];
+            if(compressedData.length > data.length) {
+               throw new RuntimeException("compressedData.length = "+compressedData.length+" but data.length="+data.length);
+            }
             System.arraycopy(compressedData, 0, data, 0, compressedData.length);
             byte[] randomBytes = new byte[SSKBlock.DATA_LENGTH-compressedData.length];
             mt.nextBytes(randomBytes);

Modified: trunk/freenet/src/freenet/keys/Key.java
===================================================================
--- trunk/freenet/src/freenet/keys/Key.java     2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/keys/Key.java     2006-10-31 17:40:58 UTC (rev 10757)
@@ -133,22 +133,27 @@
        short compressionAlgorithm;
     }

-    static Compressed compress(Bucket sourceData, boolean dontCompress, short alreadyCompressedCodec, long sourceLength, long MAX_LENGTH_BEFORE_COMPRESSION, long MAX_COMPRESSED_DATA_LENGTH, boolean shortLength) throws KeyEncodeException, IOException {
+    static Compressed compress(Bucket sourceData, boolean dontCompress, short alreadyCompressedCodec, long sourceLength, long MAX_LENGTH_BEFORE_COMPRESSION, int MAX_COMPRESSED_DATA_LENGTH, boolean shortLength) throws KeyEncodeException, IOException {
        byte[] finalData = null;
         short compressionAlgorithm = -1;
+        int maxCompressedDataLength = MAX_COMPRESSED_DATA_LENGTH;
+        if(shortLength)
+               maxCompressedDataLength -= 2;
+        else
+               maxCompressedDataLength -= 4;
         if(sourceData.size() > MAX_LENGTH_BEFORE_COMPRESSION)
             throw new KeyEncodeException("Too big");
         if((!dontCompress) || (alreadyCompressedCodec >= 0)) {
               byte[] cbuf = null;
               if(alreadyCompressedCodec >= 0) {
-                       if(sourceData.size() > MAX_COMPRESSED_DATA_LENGTH)
+                       if(sourceData.size() > maxCompressedDataLength)
                               throw new KeyEncodeException("Too big (precompressed)");
                       compressionAlgorithm = alreadyCompressedCodec;
                       cbuf = BucketTools.toByteArray(sourceData);
                       if(sourceLength > MAX_LENGTH_BEFORE_COMPRESSION)
                               throw new CHKEncodeException("Too big");
               } else {
-                       if (sourceData.size() > MAX_COMPRESSED_DATA_LENGTH) {
+                       if (sourceData.size() > maxCompressedDataLength) {
                                       // Determine the best algorithm
                                       for (int i = 0; i < Compressor.countCompressAlgorithms(); i++) {
                                               Compressor comp = Compressor
@@ -156,13 +161,13 @@
                                                ArrayBucket compressedData;
                                                try {
                                                        compressedData = (ArrayBucket) comp.compress(
-                                                                       sourceData, new ArrayBucketFactory(), MAX_COMPRESSED_DATA_LENGTH);
+                                                                       sourceData, new ArrayBucketFactory(), maxCompressedDataLength);
                                                } catch (IOException e) {
                                                        throw new Error(e);
                                                } catch (CompressionOutputSizeException e) {
                                                        continue;
                                                }
-                                               if (compressedData.size() <= MAX_COMPRESSED_DATA_LENGTH) {
+                                               if (compressedData.size() <= maxCompressedDataLength) {
                                                        compressionAlgorithm = comp.codecNumberForMetadata();
                                                        sourceLength = sourceData.size();
@@ -195,7 +200,7 @@
                }
         }
         if(finalData == null) {
-            if(sourceData.size() > MAX_COMPRESSED_DATA_LENGTH) {
+            if(sourceData.size() > maxCompressedDataLength) {
                 throw new CHKEncodeException("Too big");
             }
                finalData = BucketTools.toByteArray(sourceData);

Modified: trunk/freenet/src/freenet/keys/SSKBlock.java
===================================================================
--- trunk/freenet/src/freenet/keys/SSKBlock.java        2006-10-31 17:39:43 UTC (rev 10756)
+++ trunk/freenet/src/freenet/keys/SSKBlock.java        2006-10-31 17:40:58 UTC (rev 10757)
@@ -51,6 +51,8 @@
     final short symCipherIdentifier;

     public static final short DATA_LENGTH = 1024;
+    /* Maximum length of compressed payload */
+       public static final int MAX_COMPRESSED_DATA_LENGTH = DATA_LENGTH - 2;

     static final short SIG_R_LENGTH = 32;
     static final short SIG_S_LENGTH = 32;


Reply via email to