Author: nextgens
Date: 2008-10-21 15:24:47 +0000 (Tue, 21 Oct 2008)
New Revision: 23014

Modified:
   trunk/freenet/src/freenet/client/ArchiveManager.java
   trunk/freenet/src/freenet/client/ArchiveStoreContext.java
   trunk/freenet/src/freenet/client/ClientMetadata.java
   trunk/freenet/src/freenet/client/HighLevelSimpleClientImpl.java
   trunk/freenet/src/freenet/client/Metadata.java
   trunk/freenet/src/freenet/client/async/ClientPutter.java
   trunk/freenet/src/freenet/client/async/SimpleManifestPutter.java
   trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
   trunk/freenet/src/freenet/client/async/SingleFileInserter.java
   trunk/freenet/src/freenet/client/async/SplitFileInserter.java
   trunk/freenet/src/freenet/clients/http/WelcomeToadlet.java
   trunk/freenet/src/freenet/frost/message/FrostMessage.java
   trunk/freenet/src/freenet/node/NodeARKInserter.java
   trunk/freenet/src/freenet/node/TextModeClientInterface.java
   trunk/freenet/src/freenet/node/fcp/ClientPut.java
   trunk/freenet/src/freenet/node/fcp/DirPutFile.java
   trunk/freenet/src/freenet/node/simulator/BootstrapPushPullTest.java
Log:
more work on bug #71: *** IT NEEDS TESTING! ***
It's still not backward-compatible with stable, but it should be forward-compatible ;)
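
The heart of the change is replacing the raw short metadata IDs with the ARCHIVE_TYPE and COMPRESSOR_TYPE enums throughout the insert and fetch paths. As a rough standalone sketch of the ID-to-enum lookup added below in ArchiveManager.ARCHIVE_TYPE.getArchiveType(short) (the numeric IDs here are placeholders, not the values Freenet actually writes into metadata):

// Standalone illustration of the short-ID -> enum lookup this commit adds
// (freenet.client.ArchiveManager.ARCHIVE_TYPE.getArchiveType(short)).
// The numeric IDs are placeholders, not Freenet's real metadata IDs.
enum ArchiveType {
    ZIP((short) 0), TAR((short) 1);

    final short metadataID;

    ArchiveType(short metadataID) { this.metadataID = metadataID; }

    static ArchiveType getArchiveType(short type) {
        for (ArchiveType current : values())
            if (current.metadataID == type)
                return current;
        return null; // callers treat null as "unrecognized archive type"
    }
}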

Modified: trunk/freenet/src/freenet/client/ArchiveManager.java
===================================================================
--- trunk/freenet/src/freenet/client/ArchiveManager.java        2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/ArchiveManager.java        2008-10-21 
15:24:47 UTC (rev 23014)
@@ -18,9 +18,11 @@
 import freenet.support.MutableBoolean;
 import freenet.support.api.Bucket;
 import freenet.support.api.BucketFactory;
-import freenet.support.compress.Bzip2Compressor;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
 import freenet.support.io.BucketTools;
 import freenet.support.io.Closer;
+import java.io.InputStream;
+import java.util.zip.GZIPInputStream;
 import org.apache.tools.bzip2.CBZip2InputStream;
 import org.apache.tools.tar.TarEntry;
 import org.apache.tools.tar.TarInputStream;
@@ -80,6 +82,13 @@
                        return null;
                }

+               public static ARCHIVE_TYPE getArchiveType(short type) {
+                       for(ARCHIVE_TYPE current : values())
+                               if(current.metadataID == type)
+                                       return current;
+                       return null;
+               }
+               
                public final static ARCHIVE_TYPE getDefault() {
                        return TAR;
                }
@@ -155,12 +164,12 @@
         * @param archiveType The archive type, defined in Metadata.
         * @return An archive handler. 
         */
-       public synchronized ArchiveHandler makeHandler(FreenetURI key, short 
archiveType, boolean returnNullIfNotFound, boolean forceRefetchArchive) {
+       public synchronized ArchiveHandler makeHandler(FreenetURI key, 
ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE ctype, boolean returnNullIfNotFound, 
boolean forceRefetchArchive) {
                ArchiveHandler handler = null;
                if(!forceRefetchArchive) handler = getCached(key);
                if(handler != null) return handler;
                if(returnNullIfNotFound) return null;
-               handler = new ArchiveStoreContext(this, key, archiveType, 
forceRefetchArchive);
+               handler = new ArchiveStoreContext(this, key, archiveType, 
ctype, forceRefetchArchive);
                putCached(key, handler);
                return handler;
        }
@@ -216,7 +225,7 @@
         * @throws ArchiveRestartException If the request needs to be restarted 
because the archive
         * changed.
         */
-       public void extractToCache(FreenetURI key, short archiveType, Bucket 
data, ArchiveContext archiveContext, ArchiveStoreContext ctx, String element, 
ArchiveExtractCallback callback) throws ArchiveFailureException, 
ArchiveRestartException {
+       public void extractToCache(FreenetURI key, ARCHIVE_TYPE archiveType, 
COMPRESSOR_TYPE ctype, Bucket data, ArchiveContext archiveContext, 
ArchiveStoreContext ctx, String element, ArchiveExtractCallback callback) 
throws ArchiveFailureException, ArchiveRestartException {

                logMINOR = Logger.shouldLog(Logger.MINOR, this);

@@ -249,21 +258,40 @@
                }
                if(data.size() > archiveContext.maxArchiveSize)
                        throw new ArchiveFailureException("Archive too big 
("+data.size()+" > "+archiveContext.maxArchiveSize+")!");
-               if(ARCHIVE_TYPE.ZIP.metadataID ==  archiveType)
-                       handleZIPArchive(ctx, key, data, element, callback, 
gotElement, throwAtExit);
-               else if(ARCHIVE_TYPE.TAR.metadataID == archiveType)
-                       handleTARArchive(ctx, key, data, element, callback, 
gotElement, throwAtExit);
+               
+               
+               InputStream is = null;
+               try {
+                       if(ctype == null) {
+                               if(logMINOR) Logger.minor(this, "No 
compression");
+                               is = data.getInputStream();
+                       } else if(ctype == COMPRESSOR_TYPE.BZIP2) {
+                               if(logMINOR) Logger.minor(this, "dealing with 
BZIP2");
+                               is = new 
CBZip2InputStream(data.getInputStream());
+                       } else if(ctype == COMPRESSOR_TYPE.GZIP) {
+                               if(logMINOR) Logger.minor(this, "dealing with 
GZIP");
+                               is = new GZIPInputStream(data.getInputStream());
+                       } else
+                               throw new ArchiveFailureException("Unknown or 
unsupported compression algorithm "+ctype);
+
+                       if(ARCHIVE_TYPE.ZIP == archiveType)
+                               handleZIPArchive(ctx, key, is, element, 
callback, gotElement, throwAtExit);
+                       else if(ARCHIVE_TYPE.TAR == archiveType)
+                               handleTARArchive(ctx, key, is, element, 
callback, gotElement, throwAtExit);
                else
-                       throw new ArchiveFailureException("Unknown or 
unsupported archive algorithm "+archiveType);
+                               throw new ArchiveFailureException("Unknown or 
unsupported archive algorithm " + archiveType);
+               } catch (IOException ioe) {
+                       throw new ArchiveFailureException("An IOE occured: 
"+ioe.getMessage(), ioe);
+               }finally {
+                       Closer.close(is);
        }
+       }

-       private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, 
Bucket data, String element, ArchiveExtractCallback callback, MutableBoolean 
gotElement, boolean throwAtExit) throws ArchiveFailureException, 
ArchiveRestartException {
+       private void handleTARArchive(ArchiveStoreContext ctx, FreenetURI key, 
InputStream data, String element, ArchiveExtractCallback callback, 
MutableBoolean gotElement, boolean throwAtExit) throws ArchiveFailureException, 
ArchiveRestartException {
                if(logMINOR) Logger.minor(this, "Handling a TAR Archive");
-               CBZip2InputStream bz2is = null;
                TarInputStream tarIS = null;
                try {
-                       bz2is = new CBZip2InputStream(data.getInputStream());
-                       tarIS = new TarInputStream(bz2is);
+                       tarIS = new TarInputStream(data);

                        // MINOR: Assumes the first entry in the tarball is a 
directory. 
                        TarEntry entry;
@@ -324,22 +352,15 @@
                } catch (IOException e) {
                        throw new ArchiveFailureException("Error reading 
archive: "+e.getMessage(), e);
                } finally {
-                       if(bz2is != null) {
-                               try {
-                                       bz2is.close();
-                               } catch (IOException e) {
-                                       Logger.error(this, "Failed to close 
stream: "+e, e);
-                               }
-                       }
                        Closer.close(tarIS);
                }
        }

-       private void handleZIPArchive(ArchiveStoreContext ctx, FreenetURI key, 
Bucket data, String element, ArchiveExtractCallback callback, MutableBoolean 
gotElement, boolean throwAtExit) throws ArchiveFailureException, 
ArchiveRestartException {
+       private void handleZIPArchive(ArchiveStoreContext ctx, FreenetURI key, 
InputStream data, String element, ArchiveExtractCallback callback, 
MutableBoolean gotElement, boolean throwAtExit) throws ArchiveFailureException, 
ArchiveRestartException {
                if(logMINOR) Logger.minor(this, "Handling a ZIP Archive");
                ZipInputStream zis = null;
                try {
-                       zis = new ZipInputStream(data.getInputStream());
+                       zis = new ZipInputStream(data);

                        // MINOR: Assumes the first entry in the zip is a 
directory. 
                        ZipEntry entry;
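
For readers skimming the hunks above: extractToCache now picks the input stream from the COMPRESSOR_TYPE instead of assuming BZIP2-inside-TAR. A minimal sketch of that dispatch, assuming only the standard library (the real code also wires in org.apache.tools.bzip2.CBZip2InputStream for the BZIP2 case, and throws ArchiveFailureException rather than IOException):

import java.io.IOException;
import java.io.InputStream;
import java.util.zip.GZIPInputStream;

// Illustrative stand-in for the dispatch in ArchiveManager.extractToCache;
// CompressorType here is a placeholder for Compressor.COMPRESSOR_TYPE.
enum CompressorType { GZIP, BZIP2 }

class DecompressDispatch {
    static InputStream wrap(InputStream raw, CompressorType ctype) throws IOException {
        if (ctype == null)
            return raw;                      // no compression
        if (ctype == CompressorType.GZIP)
            return new GZIPInputStream(raw); // java.util.zip
        // The real code returns new CBZip2InputStream(raw) for BZIP2
        // (org.apache.tools.bzip2); omitted here to stay dependency-free.
        throw new IOException("Unknown or unsupported compression algorithm " + ctype);
    }
}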

Modified: trunk/freenet/src/freenet/client/ArchiveStoreContext.java
===================================================================
--- trunk/freenet/src/freenet/client/ArchiveStoreContext.java   2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/ArchiveStoreContext.java   2008-10-21 
15:24:47 UTC (rev 23014)
@@ -7,6 +7,7 @@
 import freenet.support.DoublyLinkedListImpl;
 import freenet.support.Logger;
 import freenet.support.api.Bucket;
+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;

 /**
  * Tracks all files currently in the cache from a given key.
@@ -24,7 +25,8 @@

        private ArchiveManager manager;
        private FreenetURI key;
-       private final short archiveType;
+       private final ArchiveManager.ARCHIVE_TYPE archiveType;
+       private final COMPRESSOR_TYPE compressorType;
        private boolean forceRefetchArchive;
        /** Archive size */
        private long lastSize = -1;
@@ -36,10 +38,11 @@
         * the inner lock to avoid deadlocks. */
        private final DoublyLinkedListImpl myItems;

-       public ArchiveStoreContext(ArchiveManager manager, FreenetURI key, 
short archiveType, boolean forceRefetchArchive) {
+       public ArchiveStoreContext(ArchiveManager manager, FreenetURI key, 
ArchiveManager.ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE ctype, boolean 
forceRefetchArchive) {
                this.manager = manager;
                this.key = key;
                this.archiveType = archiveType;
+               this.compressorType = ctype;
                myItems = new DoublyLinkedListImpl();
                this.forceRefetchArchive = forceRefetchArchive;
        }
@@ -131,7 +134,7 @@
        }

        public short getArchiveType() {
-               return archiveType;
+               return archiveType.metadataID;
        }

        public FreenetURI getKey() {
@@ -139,7 +142,7 @@
        }

        public void extractToCache(Bucket bucket, ArchiveContext actx, String 
element, ArchiveExtractCallback callback) throws ArchiveFailureException, 
ArchiveRestartException {
-               manager.extractToCache(key, archiveType, bucket, actx, this, 
element, callback);
+               manager.extractToCache(key, archiveType, compressorType, 
bucket, actx, this, element, callback);
        }

        /** Called just before extracting this container to the cache */

Modified: trunk/freenet/src/freenet/client/ClientMetadata.java
===================================================================
--- trunk/freenet/src/freenet/client/ClientMetadata.java        2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/ClientMetadata.java        2008-10-21 
15:24:47 UTC (rev 23014)
@@ -3,6 +3,8 @@
  * http://www.gnu.org/ for further details of the GPL. */
 package freenet.client;

+import freenet.support.compress.Compressor.COMPRESSOR_TYPE;
+
 /**
  * Stores the metadata that the client might actually be interested in.
  */
@@ -10,14 +12,16 @@

        /** The document MIME type */
        private String mimeType;
+       private COMPRESSOR_TYPE compressor;

-       public ClientMetadata(String mime) {
-               mimeType = (mime == null) ? null : mime.intern();
+       public ClientMetadata(){
+               mimeType = null;
+               compressor = null;
        }

-       /** Create an empty ClientMetadata instance */
-       public ClientMetadata() {
-               mimeType = null;
+       public ClientMetadata(String mime, COMPRESSOR_TYPE comp) {
+               mimeType = (mime == null) ? null : mime.intern();
+               compressor = comp;
        }

        /** Get the document MIME type. Will always be a valid MIME type, 
unless there
@@ -68,4 +72,12 @@
                }
                return s;
        }
+       
+       public COMPRESSOR_TYPE getCompressorType() {
+               return compressor;
 }
+       
+       public void setCompressorType(COMPRESSOR_TYPE compressor) {
+               this.compressor = compressor;
+       }
+}

Modified: trunk/freenet/src/freenet/client/HighLevelSimpleClientImpl.java
===================================================================
--- trunk/freenet/src/freenet/client/HighLevelSimpleClientImpl.java     
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/HighLevelSimpleClientImpl.java     
2008-10-21 15:24:47 UTC (rev 23014)
@@ -166,7 +166,7 @@
        }

        public FreenetURI insertRedirect(FreenetURI insertURI, FreenetURI 
targetURI) throws InsertException {
-               Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, (short)-1, 
targetURI, new ClientMetadata());
+               Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, null, null, 
targetURI, new ClientMetadata());
                Bucket b;
                try {
                        b = BucketTools.makeImmutableBucket(bucketFactory, 
m.writeToByteArray());

Modified: trunk/freenet/src/freenet/client/Metadata.java
===================================================================
--- trunk/freenet/src/freenet/client/Metadata.java      2008-10-21 14:22:02 UTC 
(rev 23013)
+++ trunk/freenet/src/freenet/client/Metadata.java      2008-10-21 15:24:47 UTC 
(rev 23014)
@@ -74,12 +74,12 @@
        /** Container archive type 
         * @see ARCHIVE_TYPE
         */
-       short archiveType;
+       ARCHIVE_TYPE archiveType;

        /** Compressed splitfile codec 
         * @see COMPRESSOR_TYPE
         */
-       short compressionCodec = -1;
+       COMPRESSOR_TYPE compressionCodec;

        /** The length of the splitfile */
        long dataLength;
@@ -197,8 +197,8 @@

                if(documentType == ARCHIVE_MANIFEST) {
                        if(logMINOR) Logger.minor(this, "Archive manifest");
-                       archiveType = dis.readShort();
-                       if(!ARCHIVE_TYPE.isValidMetadataID(archiveType))
+                       archiveType = 
ARCHIVE_TYPE.getArchiveType(dis.readShort());
+                       if(archiveType == null)
                                throw new MetadataParseException("Unrecognized 
archive type "+archiveType);
                }

@@ -215,8 +215,8 @@
                }

                if(compressed) {
-                       compressionCodec = dis.readShort();
-                       if(!COMPRESSOR_TYPE.isValidMetadataID(compressionCodec))
+                       compressionCodec = 
COMPRESSOR_TYPE.getCompressorByMetadataID(dis.readShort());
+                       if(compressionCodec == null)
                                throw new MetadataParseException("Unrecognized 
splitfile compression codec "+compressionCodec);

                        decompressedLength = dis.readLong();
@@ -267,7 +267,7 @@
                        extraMetadata = false; // can't parse, can't write
                }

-               clientMetadata = new ClientMetadata(mimeType);
+               clientMetadata = new ClientMetadata(mimeType, compressionCodec);

                if((!splitfile) && ((documentType == SIMPLE_REDIRECT) || 
(documentType == ARCHIVE_MANIFEST))) {
                        simpleRedirectKey = readKey(dis);
@@ -388,7 +388,7 @@
                        if(o instanceof String) {
                                // External redirect
                                FreenetURI uri = new FreenetURI((String)o);
-                               target = new Metadata(SIMPLE_REDIRECT, (short) 
-1, uri, null);
+                               target = new Metadata(SIMPLE_REDIRECT, null, 
null, uri, null);
                        } else if(o instanceof HashMap) {
                                target = new Metadata();
                                target.addRedirectionManifest((HashMap)o);
@@ -461,7 +461,7 @@
                documentType = SIMPLE_MANIFEST;
                noMIME = true;
                mimeType = null;
-               clientMetadata = new ClientMetadata(null);
+               clientMetadata = new ClientMetadata(null,null);
                manifestEntries = new HashMap();
                int count = 0;
                for(Iterator i = dir.keySet().iterator();i.hasNext();) {
@@ -471,7 +471,8 @@
                        Metadata target;
                        if(o instanceof String) {
                                // Zip internal redirect
-                               target = new 
Metadata(ARCHIVE_INTERNAL_REDIRECT, (short)-1, prefix+key, new 
ClientMetadata(DefaultMIMETypes.guessMIMEType(key, false)));
+                               target = new 
Metadata(ARCHIVE_INTERNAL_REDIRECT, null, null, prefix+key,
+                                       new 
ClientMetadata(DefaultMIMETypes.guessMIMEType(key, false),null));
                        } else if(o instanceof HashMap) {
                                target = new Metadata((HashMap)o, 
prefix+key+"/");
                        } else throw new IllegalArgumentException("Not String 
nor HashMap: "+o);
@@ -486,12 +487,13 @@
         * @param arg The argument; in the case of ZIP_INTERNAL_REDIRECT, the 
filename in
         * the archive to read from.
         */
-       public Metadata(byte docType, short archiveType, String arg, 
ClientMetadata cm) {
+       public Metadata(byte docType, ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE 
compressionCodec, String arg, ClientMetadata cm) {
                if(docType == ARCHIVE_INTERNAL_REDIRECT) {
                        documentType = docType;
                        this.archiveType = archiveType;
                        // Determine MIME type
                        this.clientMetadata = cm;
+                       this.compressionCodec = compressionCodec;
                        if(cm != null)
                                this.setMIMEType(cm.getMIMEType());
                        nameInArchive = arg;
@@ -505,10 +507,11 @@
         * @param uri The URI pointed to.
         * @param cm The client metadata, if any.
         */
-       public Metadata(byte docType, short archiveType, FreenetURI uri, 
ClientMetadata cm) {
+       public Metadata(byte docType, ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE 
compressionCodec, FreenetURI uri, ClientMetadata cm) {
                if((docType == SIMPLE_REDIRECT) || (docType == 
ARCHIVE_MANIFEST)) {
                        documentType = docType;
                        this.archiveType = archiveType;
+                       this.compressionCodec = compressionCodec;
                        clientMetadata = cm;
                        if((cm != null) && !cm.isTrivial()) {
                                setMIMEType(cm.getMIMEType());
@@ -524,11 +527,11 @@
        }

        public Metadata(short algo, ClientCHK[] dataURIs, ClientCHK[] 
checkURIs, int segmentSize, int checkSegmentSize, 
-                       ClientMetadata cm, long dataLength, short 
compressionAlgo, long decompressedLength, boolean isMetadata, boolean 
insertAsArchiveManifest, short archiveType) {
+                       ClientMetadata cm, long dataLength, ARCHIVE_TYPE 
archiveType, COMPRESSOR_TYPE compressionCodec, long decompressedLength, boolean 
isMetadata) {
                if(isMetadata)
                        documentType = MULTI_LEVEL_METADATA;
                else {
-                       if(insertAsArchiveManifest) {
+                       if(archiveType != null) {
                                documentType = ARCHIVE_MANIFEST;
                                this.archiveType = archiveType;
                        } else documentType = SIMPLE_REDIRECT;
@@ -536,12 +539,13 @@
                splitfile = true;
                splitfileAlgorithm = algo;
                this.dataLength = dataLength;
-               this.compressionCodec = compressionAlgo;
+               this.compressionCodec = compressionCodec;
                splitfileBlocks = dataURIs.length;
                splitfileCheckBlocks = checkURIs.length;
                splitfileDataKeys = dataURIs;
                splitfileCheckKeys = checkURIs;
                clientMetadata = cm;
+               this.compressionCodec = compressionCodec;
                this.decompressedLength = decompressedLength;
                if(cm != null)
                        setMIMEType(cm.getMIMEType());
@@ -728,7 +732,7 @@
        }

        /** What kind of archive is it? */
-       public short getArchiveType() {
+       public ARCHIVE_TYPE getArchiveType() {
                return archiveType;
        }

@@ -755,20 +759,21 @@
                        if(compressedMIME) flags |= FLAGS_COMPRESSED_MIME;
                        if(extraMetadata) flags |= FLAGS_EXTRA_METADATA;
                        if(fullKeys) flags |= FLAGS_FULL_KEYS;
-                       if(compressionCodec >= 0) flags |= FLAGS_COMPRESSED;
+                       if(compressionCodec != null) flags |= FLAGS_COMPRESSED;
                        dos.writeShort(flags);
                }

                if(documentType == ARCHIVE_MANIFEST) {
-                       dos.writeShort(archiveType);
+                       short code = archiveType.metadataID;
+                       dos.writeShort(code);
                }

                if(splitfile) {
                        dos.writeLong(dataLength);
                }

-               if(compressionCodec >= 0) {
-                       dos.writeShort(compressionCodec);
+               if(compressionCodec != null) {
+                       dos.writeShort(compressionCodec.metadataID);
                        dos.writeLong(decompressedLength);
                }

@@ -827,7 +832,7 @@
                                        if(data.length > Short.MAX_VALUE) {
                                                FreenetURI uri = 
meta.resolvedURI;
                                                if(uri != null) {
-                                                       meta = new 
Metadata(SIMPLE_REDIRECT, (short)-1,  uri, null);
+                                                       meta = new 
Metadata(SIMPLE_REDIRECT, null, null, uri, null);
                                                        data = 
meta.writeToByteArray();
                                                } else {
                                                        kill = true;
@@ -878,10 +883,10 @@
        }

        public boolean isCompressed() {
-               return compressionCodec >= 0;
+               return compressionCodec != null;
        }

-       public short getCompressionCodec() {
+       public COMPRESSOR_TYPE getCompressionCodec() {
                return compressionCodec;
        }

@@ -915,7 +920,9 @@
        }

        public void setArchiveManifest() {
-               archiveType = 
ARCHIVE_TYPE.getArchiveType(clientMetadata.getMIMEType()).metadataID;
+               ARCHIVE_TYPE type = 
ARCHIVE_TYPE.getArchiveType(clientMetadata.getMIMEType());
+               archiveType = type;
+               compressionCodec = clientMetadata.getCompressorType();
                clientMetadata.clear();
                documentType = ARCHIVE_MANIFEST;
        }

Modified: trunk/freenet/src/freenet/client/async/ClientPutter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/ClientPutter.java    2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/async/ClientPutter.java    2008-10-21 
15:24:47 UTC (rev 23014)
@@ -96,7 +96,7 @@
                                        if(!binaryBlob)
                                                currentState =
                                                        new 
SingleFileInserter(this, this, new InsertBlock(data, cm, targetURI), 
isMetadata, ctx, 
-                                                                       false, 
getCHKOnly, false, null, false, false, targetFilename, earlyEncode);
+                                                                       false, 
getCHKOnly, false, null, null, false, targetFilename, earlyEncode);
                                        else
                                                currentState =
                                                        new 
BinaryBlobInserter(data, this, null, false, priorityClass, ctx);

Modified: trunk/freenet/src/freenet/client/async/SimpleManifestPutter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SimpleManifestPutter.java    
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/async/SimpleManifestPutter.java    
2008-10-21 15:24:47 UTC (rev 23014)
@@ -25,7 +25,6 @@
 import freenet.support.Logger;
 import freenet.support.api.Bucket;
 import freenet.support.io.BucketTools;
-import org.apache.tools.bzip2.CBZip2OutputStream;
 import org.apache.tools.tar.TarEntry;
 import org.apache.tools.tar.TarOutputStream;

@@ -41,7 +40,7 @@
                        InsertBlock block = 
                                new InsertBlock(data, cm, 
FreenetURI.EMPTY_CHK_URI);
                        this.origSFI =
-                               new SingleFileInserter(this, this, block, 
false, ctx, false, getCHKOnly, true, null, false, false, null, earlyEncode);
+                               new SingleFileInserter(this, this, block, 
false, ctx, false, getCHKOnly, true, null, null, false, null, earlyEncode);
                        metadata = null;
                }

@@ -49,7 +48,7 @@
                        super(smp.getPriorityClass(), smp.chkScheduler, 
smp.sskScheduler, smp.client);
                        this.cm = cm;
                        this.data = null;
-                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
(short)-1, target, cm);
+                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
null, null, target, cm);
                        metadata = m;
                        origSFI = null;
                }
@@ -59,7 +58,7 @@
                        this.cm = cm;
                        this.data = data;
                        this.targetInArchive = targetInArchive;
-                       Metadata m = new 
Metadata(Metadata.ARCHIVE_INTERNAL_REDIRECT, (short)-1, targetInArchive, cm);
+                       Metadata m = new 
Metadata(Metadata.ARCHIVE_INTERNAL_REDIRECT, null, null, targetInArchive, cm);
                        metadata = m;
                        origSFI = null;
                }
@@ -118,7 +117,7 @@
                                // The file was too small to have its own 
metadata, we get this instead.
                                // So we make the key into metadata.
                                Metadata m =
-                                       new Metadata(Metadata.SIMPLE_REDIRECT, 
(short) -1, key.getURI(), cm);
+                                       new Metadata(Metadata.SIMPLE_REDIRECT, 
null, null, key.getURI(), cm);
                                onMetadata(m, null);
                        }
                }
@@ -311,7 +310,7 @@
                                if(mimeType == null || 
mimeType.equals(DefaultMIMETypes.DEFAULT_MIME_TYPE))
                                        cm = null;
                                else
-                                       cm = new ClientMetadata(mimeType);
+                                       cm = new ClientMetadata(mimeType, null);
                                PutHandler ph;
                                Bucket data = element.data;
                                if(element.targetURI != null) {
@@ -418,6 +417,7 @@
                InsertBlock block;
                boolean isMetadata = true;
                boolean insertAsArchiveManifest = false;
+               ARCHIVE_TYPE archiveType = null;
                if(!(elementsToPutInArchive.isEmpty())) {
                        // There is an archive to insert.
                        // We want to include the metadata.
@@ -426,16 +426,19 @@
                        try {                           
                                Bucket outputBucket = 
ctx.bf.makeBucket(baseMetadata.dataLength());
                                // TODO: try both ? - maybe not worth it
-                               String mimeType = (ARCHIVE_TYPE.getDefault() == 
ARCHIVE_TYPE.TAR ?
+                               archiveType = ARCHIVE_TYPE.getDefault();
+                               String mimeType = (archiveType == 
ARCHIVE_TYPE.TAR ?
                                        createTarBucket(bucket, outputBucket) :
                                        createZipBucket(bucket, outputBucket));

+                               if(logMINOR) Logger.minor(this, "We are using 
"+archiveType);
+                               
                                // Now we have to insert the Archive we have 
generated.

                                // Can we just insert it, and not bother with a 
redirect to it?
                                // Thereby exploiting implicit manifest 
support, which will pick up on .metadata??
                                // We ought to be able to !!
-                               block = new InsertBlock(outputBucket, new 
ClientMetadata(mimeType), targetURI);
+                               block = new InsertBlock(outputBucket, new 
ClientMetadata(mimeType, null), targetURI);
                                isMetadata = false;
                                insertAsArchiveManifest = true;
                        } catch (IOException e) {
@@ -446,7 +449,7 @@
                        block = new InsertBlock(bucket, null, targetURI);
                try {
                        SingleFileInserter metadataInserter = 
-                               new SingleFileInserter(this, this, block, 
isMetadata, ctx, false, getCHKOnly, false, baseMetadata, 
insertAsArchiveManifest, true, null, earlyEncode);
+                               new SingleFileInserter(this, this, block, 
isMetadata, ctx, false, getCHKOnly, false, baseMetadata, archiveType, true, 
null, earlyEncode);
                        if(logMINOR) Logger.minor(this, "Inserting main 
metadata: "+metadataInserter);
                        this.metadataPuttersByMetadata.put(baseMetadata, 
metadataInserter);
                        metadataPuttersUnfetchable.put(baseMetadata, 
metadataInserter);
@@ -460,8 +463,7 @@
                if(logMINOR) Logger.minor(this, "Create a TAR Bucket");

                OutputStream os = new 
BufferedOutputStream(outputBucket.getOutputStream());
-               CBZip2OutputStream bz2OS = new CBZip2OutputStream(os);
-               TarOutputStream tarOS = new TarOutputStream(bz2OS);
+               TarOutputStream tarOS = new TarOutputStream(os);
                TarEntry ze;

                for(PutHandler ph : elementsToPutInArchive) {
@@ -537,7 +539,7 @@

                                InsertBlock ib = new InsertBlock(b, null, 
FreenetURI.EMPTY_CHK_URI);
                                SingleFileInserter metadataInserter = 
-                                       new SingleFileInserter(this, this, ib, 
true, ctx, false, getCHKOnly, false, m, false, true, null, earlyEncode);
+                                       new SingleFileInserter(this, this, ib, 
true, ctx, false, getCHKOnly, false, m, null, true, null, earlyEncode);
                                if(logMINOR) Logger.minor(this, "Inserting 
subsidiary metadata: "+metadataInserter+" for "+m);
                                synchronized(this) {
                                        this.metadataPuttersByMetadata.put(m, 
metadataInserter);

Modified: trunk/freenet/src/freenet/client/async/SingleFileFetcher.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SingleFileFetcher.java       
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/async/SingleFileFetcher.java       
2008-10-21 15:24:47 UTC (rev 23014)
@@ -259,7 +259,7 @@
                                }
                                continue; // loop
                        } else if(metadata.isArchiveManifest()) {
-                               if(logMINOR) Logger.minor(this, "Is archive 
manifest (type="+metadata.getArchiveType()+')');
+                               if(logMINOR) Logger.minor(this, "Is archive 
manifest (type="+metadata.getArchiveType()+" 
codec="+metadata.getCompressionCodec()+')');
                                if(metaStrings.isEmpty() && 
ctx.returnZIPManifests) {
                                        // Just return the archive, whole.
                                        metadata.setSimpleRedirect();
@@ -270,7 +270,7 @@
                                // It's more efficient to keep the existing ah 
if we can, and it is vital in
                                // the case of binary blobs.
                                if(ah == null || !ah.getKey().equals(thisKey))
-                                       ah = (ArchiveStoreContext) 
ctx.archiveManager.makeHandler(thisKey, metadata.getArchiveType(), false, 
+                                       ah = (ArchiveStoreContext) 
ctx.archiveManager.makeHandler(thisKey, metadata.getArchiveType(), 
metadata.getCompressionCodec(), false, 
                                                        (parent instanceof 
ClientGetter ? ((ClientGetter)parent).collectingBinaryBlob() : false));
                                archiveMetadata = metadata;
                                // ah is set. This means we are currently 
handling an archive.
@@ -442,7 +442,7 @@
                                if((redirectedKey instanceof ClientCHK) && 
!((ClientCHK)redirectedKey).isMetadata())
                                        rcb.onBlockSetFinished(this);
                                if(metadata.isCompressed()) {
-                                       COMPRESSOR_TYPE codec = 
COMPRESSOR_TYPE.getCompressorByMetadataID(metadata.getCompressionCodec());
+                                       COMPRESSOR_TYPE codec = 
metadata.getCompressionCodec();
                                        f.addDecompressor(codec);
                                }
                                parent.onTransition(this, f);
@@ -478,7 +478,7 @@
                                // Splitfile (possibly compressed)

                                if(metadata.isCompressed()) {
-                                       COMPRESSOR_TYPE codec = 
COMPRESSOR_TYPE.getCompressorByMetadataID(metadata.getCompressionCodec());
+                                       COMPRESSOR_TYPE codec = 
metadata.getCompressionCodec();
                                        addDecompressor(codec);
                                }


Modified: trunk/freenet/src/freenet/client/async/SingleFileInserter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SingleFileInserter.java      
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/async/SingleFileInserter.java      
2008-10-21 15:24:47 UTC (rev 23014)
@@ -44,7 +44,8 @@
        final boolean metadata;
        final PutCompletionCallback cb;
        final boolean getCHKOnly;
-       final boolean insertAsArchiveManifest;
+       final ARCHIVE_TYPE archiveType;
+       COMPRESSOR_TYPE compressorUsed;
        /** If true, we are not the top level request, and should not
         * update our parent to point to us as current put-stage. */
        private final boolean reportMetadataOnly;
@@ -70,7 +71,7 @@
         */
        SingleFileInserter(BaseClientPutter parent, PutCompletionCallback cb, 
InsertBlock block, 
                        boolean metadata, InsertContext ctx, boolean 
dontCompress, 
-                       boolean getCHKOnly, boolean reportMetadataOnly, Object 
token, boolean insertAsArchiveManifest, 
+                       boolean getCHKOnly, boolean reportMetadataOnly, Object 
token, ARCHIVE_TYPE archiveType, 
                        boolean freeData, String targetFilename, boolean 
earlyEncode) throws InsertException {
                this.earlyEncode = earlyEncode;
                this.reportMetadataOnly = reportMetadataOnly;
@@ -81,7 +82,7 @@
                this.metadata = metadata;
                this.cb = cb;
                this.getCHKOnly = getCHKOnly;
-               this.insertAsArchiveManifest = insertAsArchiveManifest;
+               this.archiveType = archiveType;
                this.freeData = freeData;
                this.targetFilename = targetFilename;
                logMINOR = Logger.shouldLog(Logger.MINOR, this);
@@ -165,9 +166,10 @@
                        // Try to compress the data.
                        // Try each algorithm, starting with the fastest and 
weakest.
                        // Stop when run out of algorithms, or the compressed 
data fits in a single block.
-                       try {
                                for(COMPRESSOR_TYPE comp : 
COMPRESSOR_TYPE.values()) {
-                                       if(logMINOR) Logger.minor(this, 
"Attempt to compress using "+comp);
+                               try {
+                                       if(logMINOR)
+                                               Logger.minor(this, "Attempt to 
compress using " + comp);
                                        // Only produce if we are compressing 
*the original data*
                                        if(parent == cb)
                                                
ctx.eventProducer.produceEvent(new StartedCompressionEvent(comp));
@@ -180,30 +182,32 @@
                                                bestCompressedData = result;
                                                break;
                                        }
-                                       if((bestCompressedData != null) && 
(result.size() <  bestCompressedData.size())) {
+                                       if((bestCompressedData != null) && 
(result.size() < bestCompressedData.size())) {
                                                bestCompressedData.free();
                                                bestCompressedData = result;
                                                bestCodec = comp;
                                        } else if((bestCompressedData == null) 
&& (result.size() < data.size())) {
                                                bestCompressedData = result;
                                                bestCodec = comp;
-                                       } else {
+                                       } else
                                                result.free();
-                                       }
-                               }
-                       } catch (IOException e) {
+
+                               } catch(IOException e) {
                                throw new 
InsertException(InsertException.BUCKET_ERROR, e, null);
-                       } catch (CompressionOutputSizeException e) {
+                               } catch(CompressionOutputSizeException e) {
                                // Impossible
                                throw new Error(e);
                        }
                }
-               boolean freeData = false;
+               }
+               boolean shouldFreeData = false;
                if(bestCompressedData != null) {
                        long compressedSize = bestCompressedData.size();
                        if(logMINOR) Logger.minor(this, "The best compression 
algorithm is "+bestCodec+ " we have a "+origSize/compressedSize+" ratio! 
("+origSize+'/'+compressedSize+')');
                        data = bestCompressedData;
-                       freeData = true;
+                       shouldFreeData = true;
+                       block.clientMetadata.setCompressorType(bestCodec);
+                       compressorUsed = bestCodec;
                }

                if(parent == cb) {
@@ -224,7 +228,7 @@
                        throw new 
InsertException(InsertException.INTERNAL_ERROR, "2GB+ should not encode to one 
block!", null);

                boolean noMetadata = ((block.clientMetadata == null) || 
block.clientMetadata.isTrivial()) && targetFilename == null;
-               if(noMetadata && !insertAsArchiveManifest) {
+               if(noMetadata && archiveType == null) {
                        if(fitsInOneBlockAsIs) {
                                // Just insert it
                                ClientPutState bi =
@@ -239,7 +243,7 @@
                        // Insert single block, then insert pointer to it
                        if(reportMetadataOnly) {
                                SingleBlockInserter dataPutter = new 
SingleBlockInserter(parent, data, codecNumber, FreenetURI.EMPTY_CHK_URI, ctx, 
cb, metadata, (int)origSize, -1, getCHKOnly, true, true, token);
-                               Metadata meta = 
makeMetadata(dataPutter.getURI());
+                               Metadata meta = makeMetadata(archiveType, 
bestCodec, dataPutter.getURI());
                                cb.onMetadata(meta, this);
                                cb.onTransition(this, dataPutter);
                                dataPutter.schedule();
@@ -248,7 +252,7 @@
                                MultiPutCompletionCallback mcb = 
                                        new MultiPutCompletionCallback(cb, 
parent, token);
                                SingleBlockInserter dataPutter = new 
SingleBlockInserter(parent, data, codecNumber, FreenetURI.EMPTY_CHK_URI, ctx, 
mcb, metadata, (int)origSize, -1, getCHKOnly, true, false, token);
-                               Metadata meta = 
makeMetadata(dataPutter.getURI());
+                               Metadata meta = makeMetadata(archiveType, 
bestCodec, dataPutter.getURI());
                                Bucket metadataBucket;
                                try {
                                        metadataBucket = 
BucketTools.makeImmutableBucket(ctx.bf, meta.writeToByteArray());
@@ -280,13 +284,13 @@
                // insert it. Then when the splitinserter has finished, and the
                // metadata insert has finished too, tell the master callback.
                if(reportMetadataOnly) {
-                       SplitFileInserter sfi = new SplitFileInserter(parent, 
cb, data, bestCodec, origSize, block.clientMetadata, ctx, getCHKOnly, metadata, 
token, insertAsArchiveManifest, freeData);
+                       SplitFileInserter sfi = new SplitFileInserter(parent, 
cb, data, bestCodec, origSize, block.clientMetadata, ctx, getCHKOnly, metadata, 
token, archiveType, shouldFreeData);
                        cb.onTransition(this, sfi);
                        sfi.start();
                        if(earlyEncode) sfi.forceEncode();
                } else {
                        SplitHandler sh = new SplitHandler();
-                       SplitFileInserter sfi = new SplitFileInserter(parent, 
sh, data, bestCodec, origSize, block.clientMetadata, ctx, getCHKOnly, metadata, 
token, insertAsArchiveManifest, freeData);
+                       SplitFileInserter sfi = new SplitFileInserter(parent, 
sh, data, bestCodec, origSize, block.clientMetadata, ctx, getCHKOnly, metadata, 
token, archiveType, shouldFreeData);
                        sh.sfi = sfi;
                        cb.onTransition(this, sh);
                        sfi.start();
@@ -294,8 +298,12 @@
                }
        }

-       private Metadata makeMetadata(FreenetURI uri) {
-               Metadata meta = new Metadata(insertAsArchiveManifest ? 
Metadata.ARCHIVE_MANIFEST : Metadata.SIMPLE_REDIRECT, 
ARCHIVE_TYPE.getDefault().metadataID, uri, block.clientMetadata);
+       private Metadata makeMetadata(ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE 
codec, FreenetURI uri) {
+               Metadata meta = null;
+               if(archiveType != null)
+                       meta = new Metadata(Metadata.ARCHIVE_MANIFEST, 
archiveType, codec, uri, block.clientMetadata);
+               else  // redirect
+                       meta = new Metadata(Metadata.SIMPLE_REDIRECT, 
archiveType, codec, uri, block.clientMetadata);
                if(targetFilename != null) {
                        HashMap hm = new HashMap();
                        hm.put(targetFilename, meta);
@@ -363,7 +371,7 @@
                        if(sfiFS == null)
                                throw new ResumeException("No 
SplitFileInserter");
                        ClientPutState newSFI, newMetaPutter = null;
-                       newSFI = new SplitFileInserter(parent, this, 
forceMetadata ? null : block.clientMetadata, ctx, getCHKOnly, meta, token, 
insertAsArchiveManifest, sfiFS);
+                       newSFI = new SplitFileInserter(parent, this, 
forceMetadata ? null : block.clientMetadata, ctx, getCHKOnly, meta, token, 
archiveType, compressorUsed, sfiFS);
                        if(logMINOR) Logger.minor(this, "Starting "+newSFI+" 
for "+this);
                        fs.removeSubset("SplitFileInserter");
                        SimpleFieldSet metaFS = fs.subset("MetadataPutter");
@@ -373,7 +381,7 @@
                                        if(type.equals("SplitFileInserter")) {
                                                // FIXME 
insertAsArchiveManifest ?!?!?!
                                                newMetaPutter = 
-                                                       new 
SplitFileInserter(parent, this, null, ctx, getCHKOnly, true, token, 
insertAsArchiveManifest, metaFS);
+                                                       new 
SplitFileInserter(parent, this, null, ctx, getCHKOnly, true, token, 
archiveType, compressorUsed, metaFS);
                                        } else if(type.equals("SplitHandler")) {
                                                newMetaPutter = new 
SplitHandler();
                                                
((SplitHandler)newMetaPutter).start(metaFS, true);
@@ -522,7 +530,7 @@
                        InsertBlock newBlock = new InsertBlock(metadataBucket, 
null, block.desiredURI);
                        try {
                                synchronized(this) {
-                                       metadataPutter = new 
SingleFileInserter(parent, this, newBlock, true, ctx, false, getCHKOnly, false, 
token, false, true, metaPutterTargetFilename, earlyEncode);
+                                       metadataPutter = new 
SingleFileInserter(parent, this, newBlock, true, ctx, false, getCHKOnly, false, 
token, archiveType, true, metaPutterTargetFilename, earlyEncode);
                                        // If EarlyEncode, then start the 
metadata insert ASAP, to get the key.
                                        // Otherwise, wait until the data is 
fetchable (to improve persistence).
                                        if(!(earlyEncode || 
splitInsertSuccess)) return;

Modified: trunk/freenet/src/freenet/client/async/SplitFileInserter.java
===================================================================
--- trunk/freenet/src/freenet/client/async/SplitFileInserter.java       
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/client/async/SplitFileInserter.java       
2008-10-21 15:24:47 UTC (rev 23014)
@@ -28,7 +28,7 @@
        final InsertContext ctx;
        final PutCompletionCallback cb;
        final long dataLength;
-       final short compressionCodec;
+       final COMPRESSOR_TYPE compressionCodec;
        final short splitfileAlgorithm;
        final int segmentSize;
        final int checkSegmentSize;
@@ -42,7 +42,7 @@
        private volatile boolean finished;
        private boolean fetchable;
        public final Object token;
-       final boolean insertAsArchiveManifest;
+       final ARCHIVE_TYPE archiveType;
        private boolean forceEncode;
        private final long decompressedLength;

@@ -53,7 +53,7 @@
                fs.putSingle("Type", "SplitFileInserter");
                fs.put("DataLength", dataLength);
                fs.put("DecompressedLength", decompressedLength);
-               fs.put("CompressionCodec", compressionCodec);
+               fs.putSingle("CompressionCodec", compressionCodec.toString());
                fs.put("SplitfileCodec", splitfileAlgorithm);
                fs.put("Finished", finished);
                fs.put("SegmentSize", segmentSize);
@@ -67,10 +67,11 @@
                return fs;
        }

-       public SplitFileInserter(BaseClientPutter put, PutCompletionCallback 
cb, Bucket data, COMPRESSOR_TYPE bestCodec, long decompressedLength, 
ClientMetadata clientMetadata, InsertContext ctx, boolean getCHKOnly, boolean 
isMetadata, Object token, boolean insertAsArchiveManifest, boolean freeData) 
throws InsertException {
+       public SplitFileInserter(BaseClientPutter put, PutCompletionCallback 
cb, Bucket data, COMPRESSOR_TYPE bestCodec, long decompressedLength, 
ClientMetadata clientMetadata, InsertContext ctx, boolean getCHKOnly, boolean 
isMetadata, Object token, ARCHIVE_TYPE archiveType, boolean freeData) throws 
InsertException {
                logMINOR = Logger.shouldLog(Logger.MINOR, this);
                this.parent = put;
-               this.insertAsArchiveManifest = insertAsArchiveManifest;
+               this.archiveType = archiveType;
+               this.compressionCodec = bestCodec;
                this.token = token;
                this.finished = false;
                this.isMetadata = isMetadata;
@@ -88,10 +89,6 @@
                }
                countDataBlocks = dataBuckets.length;
                // Encoding is done by segments
-               if(bestCodec == null)
-                       compressionCodec = -1;
-               else
-                       compressionCodec = bestCodec.metadataID;
                this.splitfileAlgorithm = ctx.splitfileAlgorithm;
                segmentSize = ctx.splitfileSegmentDataBlocks;
                checkSegmentSize = splitfileAlgorithm == 
Metadata.SPLITFILE_NONREDUNDANT ? 0 : ctx.splitfileSegmentCheckBlocks;
@@ -106,10 +103,10 @@
                parent.onMajorProgress();
        }

-       public SplitFileInserter(BaseClientPutter parent, PutCompletionCallback 
cb, ClientMetadata clientMetadata, InsertContext ctx, boolean getCHKOnly, 
boolean metadata, Object token, boolean insertAsArchiveManifest, SimpleFieldSet 
fs) throws ResumeException {
+       public SplitFileInserter(BaseClientPutter parent, PutCompletionCallback 
cb, ClientMetadata clientMetadata, InsertContext ctx, boolean getCHKOnly, 
boolean metadata, Object token, ARCHIVE_TYPE archiveType, COMPRESSOR_TYPE 
bestCodec, SimpleFieldSet fs) throws ResumeException {
                logMINOR = Logger.shouldLog(Logger.MINOR, this);
                this.parent = parent;
-               this.insertAsArchiveManifest = insertAsArchiveManifest;
+               this.archiveType = archiveType;
                this.token = token;
                this.finished = false;
                this.isMetadata = metadata;
@@ -149,12 +146,13 @@
                } catch (NumberFormatException e) {
                        throw new ResumeException("Corrupt CheckSegmentSize: 
"+e+" : "+length);
                }
+               if(bestCodec != null) {
+                       compressionCodec = bestCodec;
+               } else {
                String ccodec = fs.get("CompressionCodec");
-               if(ccodec == null) throw new ResumeException("No compression 
codec");
-               try {
-                       compressionCodec = Short.parseShort(ccodec);
-               } catch (NumberFormatException e) {
-                       throw new ResumeException("Corrupt CompressionCodec: 
"+e+" : "+ccodec);
+                       if(ccodec == null)
+                               throw new ResumeException("No compression 
codec");
+                       compressionCodec = COMPRESSOR_TYPE.valueOf(ccodec);
                }
                String scodec = fs.get("SplitfileCodec");
                if(scodec == null) throw new ResumeException("No splitfile 
codec");
@@ -291,7 +289,7 @@

                        if(!missingURIs) {
                                // Create Metadata
-                               m = new Metadata(splitfileAlgorithm, dataURIs, 
checkURIs, segmentSize, checkSegmentSize, cm, dataLength, compressionCodec, 
decompressedLength, isMetadata, insertAsArchiveManifest, 
ARCHIVE_TYPE.getDefault().metadataID);
+                               m = new Metadata(splitfileAlgorithm, dataURIs, 
checkURIs, segmentSize, checkSegmentSize, cm, dataLength, archiveType, 
compressionCodec, decompressedLength, isMetadata);
                        }
                        haveSentMetadata = true;
                }

Modified: trunk/freenet/src/freenet/clients/http/WelcomeToadlet.java
===================================================================
--- trunk/freenet/src/freenet/clients/http/WelcomeToadlet.java  2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/clients/http/WelcomeToadlet.java  2008-10-21 
15:24:47 UTC (rev 23014)
@@ -305,7 +305,7 @@
             if (type == null) {
                 type = "text/plain";
             }
-            ClientMetadata contentType = new ClientMetadata(type);
+            ClientMetadata contentType = new ClientMetadata(type, null);

             Bucket bucket = request.getPart("filename");


Modified: trunk/freenet/src/freenet/frost/message/FrostMessage.java
===================================================================
--- trunk/freenet/src/freenet/frost/message/FrostMessage.java   2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/frost/message/FrostMessage.java   2008-10-21 
15:24:47 UTC (rev 23014)
@@ -293,7 +293,7 @@
                key = this.composeUploadKey(innitialIndex);
                keepgoing = false;

-            block = new InsertBlock(new ArrayBucket(data), new 
ClientMetadata(type), key);
+            block = new InsertBlock(new ArrayBucket(data), new 
ClientMetadata(type, null), key);

                // try inserting the message with the key
             try {

Modified: trunk/freenet/src/freenet/node/NodeARKInserter.java
===================================================================
--- trunk/freenet/src/freenet/node/NodeARKInserter.java 2008-10-21 14:22:02 UTC 
(rev 23013)
+++ trunk/freenet/src/freenet/node/NodeARKInserter.java 2008-10-21 15:24:47 UTC 
(rev 23014)
@@ -156,7 +156,7 @@


                inserter = new ClientPutter(this, b, uri,
-                                       new ClientMetadata("text/plain") /* it 
won't quite fit in an SSK anyway */, 
+                                       new ClientMetadata("text/plain", null) 
/* it won't quite fit in an SSK anyway */, 
                                        node.clientCore.makeClient((short)0, 
true).getInsertContext(true),
                                        
node.clientCore.requestStarters.chkPutScheduler, 
node.clientCore.requestStarters.sskPutScheduler, 
                                        
RequestStarter.INTERACTIVE_PRIORITY_CLASS, false, false, this, null, null, 
false);

Modified: trunk/freenet/src/freenet/node/TextModeClientInterface.java
===================================================================
--- trunk/freenet/src/freenet/node/TextModeClientInterface.java 2008-10-21 
14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/node/TextModeClientInterface.java 2008-10-21 
15:24:47 UTC (rev 23014)
@@ -582,7 +582,7 @@
                        mimeType = ""; // don't need to override it

                FileBucket fb = new FileBucket(f, true, false, false, false, 
false);
-               InsertBlock block = new InsertBlock(fb, new 
ClientMetadata(mimeType), FreenetURI.EMPTY_CHK_URI);
+               InsertBlock block = new InsertBlock(fb, new 
ClientMetadata(mimeType, null), FreenetURI.EMPTY_CHK_URI);

                startTime = System.currentTimeMillis();
                FreenetURI uri = client.insert(block, getCHKOnly, f.getName());

Modified: trunk/freenet/src/freenet/node/fcp/ClientPut.java
===================================================================
--- trunk/freenet/src/freenet/node/fcp/ClientPut.java   2008-10-21 14:22:02 UTC 
(rev 23013)
+++ trunk/freenet/src/freenet/node/fcp/ClientPut.java   2008-10-21 15:24:47 UTC 
(rev 23014)
@@ -117,13 +117,13 @@
                if(persistenceType != PERSIST_CONNECTION)
                        client.register(this, false);
                Bucket tempData = data;
-               ClientMetadata cm = new ClientMetadata(mimeType);
+               ClientMetadata cm = new ClientMetadata(mimeType, null);
                boolean isMetadata = false;
-               boolean logMINOR = Logger.shouldLog(Logger.MINOR, this);
+               logMINOR = Logger.shouldLog(Logger.MINOR, this);
                if(logMINOR) Logger.minor(this, "data = "+tempData+", 
uploadFrom = "+ClientPutMessage.uploadFromString(uploadFrom));
                if(uploadFrom == ClientPutMessage.UPLOAD_FROM_REDIRECT) {
                        this.targetURI = redirectTarget;
-                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
(short)-1, targetURI, cm);
+                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
null, null, targetURI, cm);
                        byte[] d;
                        try {
                                d = m.writeToByteArray();
@@ -200,12 +200,12 @@
                if(persistenceType != PERSIST_CONNECTION)
                        client.register(this, false);
                Bucket tempData = message.bucket;
-               ClientMetadata cm = new ClientMetadata(mimeType);
+               ClientMetadata cm = new ClientMetadata(mimeType, null);
                boolean isMetadata = false;
                if(logMINOR) Logger.minor(this, "data = "+tempData+", 
uploadFrom = "+ClientPutMessage.uploadFromString(uploadFrom));
                if(uploadFrom == ClientPutMessage.UPLOAD_FROM_REDIRECT) {
                        this.targetURI = message.redirectTarget;
-                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
(short)-1, targetURI, cm);
+                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
null, null, targetURI, cm);
                        byte[] d;
                        try {
                                d = m.writeToByteArray();
@@ -292,7 +292,7 @@
                                throw new PersistenceParseException("Unknown 
UploadFrom: "+from);
                }

-               ClientMetadata cm = new ClientMetadata(mimeType);
+               ClientMetadata cm = new ClientMetadata(mimeType, null);

                boolean isMetadata = false;

@@ -326,7 +326,7 @@
                        targetURI = new FreenetURI(target);
                        if(logMINOR)
                                Logger.minor(this, "Uploading from redirect for 
"+this+" : "+targetURI);
-                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
(short)-1, targetURI, cm);
+                       Metadata m = new Metadata(Metadata.SIMPLE_REDIRECT, 
null, null, targetURI, cm);
                        byte[] d;
                        try {
                                d = m.writeToByteArray();

Modified: trunk/freenet/src/freenet/node/fcp/DirPutFile.java
===================================================================
--- trunk/freenet/src/freenet/node/fcp/DirPutFile.java  2008-10-21 14:22:02 UTC 
(rev 23013)
+++ trunk/freenet/src/freenet/node/fcp/DirPutFile.java  2008-10-21 15:24:47 UTC 
(rev 23014)
@@ -26,9 +26,9 @@
                        throw new 
MessageInvalidException(ProtocolErrorMessage.MISSING_FIELD, "Missing field 
Name", identifier, global);
                String contentTypeOverride = subset.get("Metadata.ContentType");
                if(contentTypeOverride != null) {
-                       meta = new ClientMetadata(contentTypeOverride);
+                       meta = new ClientMetadata(contentTypeOverride, null);
                } else {
-                       meta = new ClientMetadata(guessMIME());
+                       meta = new ClientMetadata(guessMIME(), null);
                }
        }


Modified: trunk/freenet/src/freenet/node/simulator/BootstrapPushPullTest.java
===================================================================
--- trunk/freenet/src/freenet/node/simulator/BootstrapPushPullTest.java 
2008-10-21 14:22:02 UTC (rev 23013)
+++ trunk/freenet/src/freenet/node/simulator/BootstrapPushPullTest.java 
2008-10-21 15:24:47 UTC (rev 23014)
@@ -76,7 +76,7 @@
         os.close();
         System.out.println("Inserting test data.");
         HighLevelSimpleClient client = node.clientCore.makeClient((short)0);
-        InsertBlock block = new InsertBlock(data, new ClientMetadata(null), 
FreenetURI.EMPTY_CHK_URI);
+        InsertBlock block = new InsertBlock(data, new ClientMetadata(null, 
null), FreenetURI.EMPTY_CHK_URI);
         long startInsertTime = System.currentTimeMillis();
         FreenetURI uri;
         try {

