Author: toad
Date: 2006-01-06 19:26:42 +0000 (Fri, 06 Jan 2006)
New Revision: 7769
Modified:
trunk/freenet/src/freenet/keys/CHKBlock.java
trunk/freenet/src/freenet/keys/SSKBlock.java
trunk/freenet/src/freenet/node/Node.java
trunk/freenet/src/freenet/node/RealNodePingTest.java
trunk/freenet/src/freenet/node/RealNodeRequestInsertTest.java
trunk/freenet/src/freenet/node/RealNodeRoutingTest.java
trunk/freenet/src/freenet/node/Version.java
trunk/freenet/src/freenet/store/BerkeleyDBFreenetStore.java
Log:
323:
Create SSK datastore, rename CHK datastore.
Also other SSK-related changes: add TOTAL_HEADERS_LENGTH constants to CHKBlock and SSKBlock, parameterize BerkeleyDBFreenetStore's block and header sizes, and add an LRU cache of SSK public keys to Node.
Modified: trunk/freenet/src/freenet/keys/CHKBlock.java
===================================================================
--- trunk/freenet/src/freenet/keys/CHKBlock.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/keys/CHKBlock.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -34,6 +34,7 @@
final short hashIdentifier;
final NodeCHK chk;
public static final int MAX_LENGTH_BEFORE_COMPRESSION = Integer.MAX_VALUE;
+ public static final int TOTAL_HEADERS_LENGTH = 36;
public String toString() {
return super.toString()+": chk="+chk;
@@ -60,7 +61,8 @@
public CHKBlock(byte[] data2, byte[] header2, NodeCHK key, boolean verify) throws CHKVerifyException {
data = data2;
header = header2;
- if(header.length < 2) throw new IllegalArgumentException("Too short: "+header.length);
+ if(header.length != TOTAL_HEADERS_LENGTH)
+ throw new IllegalArgumentException("Wrong length: "+header.length+" should be "+TOTAL_HEADERS_LENGTH);
hashIdentifier = (short)(((header[0] & 0xff) << 8) + (header[1] & 0xff));
this.chk = key;
// Logger.debug(CHKBlock.class, "Data length: "+data.length+", header length: "+header.length);
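For context, the new check means a CHK header must be exactly TOTAL_HEADERS_LENGTH (36) bytes rather than merely at least 2, and the hash identifier is still read big-endian from the first two bytes. A minimal standalone sketch of what that amounts to (not Freenet code; the class name and the sample identifier value are made up for illustration):

    // Standalone sketch of the stricter CHK header check introduced above.
    public class ChkHeaderCheckSketch {
        static final int TOTAL_HEADERS_LENGTH = 36; // constant added to CHKBlock in this commit

        static short parseHashIdentifier(byte[] header) {
            if (header.length != TOTAL_HEADERS_LENGTH)
                throw new IllegalArgumentException(
                        "Wrong length: " + header.length + " should be " + TOTAL_HEADERS_LENGTH);
            // Same big-endian parse as CHKBlock's constructor.
            return (short) (((header[0] & 0xff) << 8) + (header[1] & 0xff));
        }

        public static void main(String[] args) {
            byte[] header = new byte[TOTAL_HEADERS_LENGTH];
            header[1] = 1; // hypothetical hash identifier value
            System.out.println("hashIdentifier = " + parseHashIdentifier(header));
        }
    }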
Modified: trunk/freenet/src/freenet/keys/SSKBlock.java
===================================================================
--- trunk/freenet/src/freenet/keys/SSKBlock.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/keys/SSKBlock.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -47,12 +47,16 @@
static final short SIG_R_LENGTH = 32;
static final short SIG_S_LENGTH = 32;
static final short E_H_DOCNAME_LENGTH = 32;
-
+ static public final short TOTAL_HEADERS_LENGTH = 2 + SIG_R_LENGTH + SIG_S_LENGTH + 2 +
+ E_H_DOCNAME_LENGTH + ClientSSKBlock.DATA_DECRYPT_KEY_LENGTH + 2 + 2;
+
/**
* Initialize, and verify data, headers against key. Provided
* key must have a pubkey, or we throw.
*/
public SSKBlock(byte[] data, byte[] headers, NodeSSK nodeKey) throws SSKVerifyException {
+ if(headers.length != TOTAL_HEADERS_LENGTH)
+ throw new IllegalArgumentException("Headers.length="+headers.length+" should be "+TOTAL_HEADERS_LENGTH);
this.data = data;
this.headers = headers;
this.nodeKey = nodeKey;
@@ -61,7 +65,6 @@
throw new SSKVerifyException("Data length wrong: "+data.length+" should be "+DATA_LENGTH);
if(pubKey == null)
throw new SSKVerifyException("PubKey was null from "+nodeKey);
- if(headers.length < 2) throw new IllegalArgumentException("Too short: "+headers.length);
hashIdentifier = (short)(((headers[0] & 0xff) << 8) + (headers[1] & 0xff));
if(hashIdentifier != HASH_SHA256)
throw new SSKVerifyException("Hash not SHA-256");
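For context, TOTAL_HEADERS_LENGTH is simply the sum of the fixed SSK header fields. A quick standalone sketch of the arithmetic, using the constants visible in this diff and assuming ClientSSKBlock.DATA_DECRYPT_KEY_LENGTH is 32 (that constant is defined elsewhere and not shown here):

    // Sketch of the arithmetic behind SSKBlock.TOTAL_HEADERS_LENGTH.
    public class SskHeaderLengthSketch {
        static final short SIG_R_LENGTH = 32;
        static final short SIG_S_LENGTH = 32;
        static final short E_H_DOCNAME_LENGTH = 32;
        static final short DATA_DECRYPT_KEY_LENGTH = 32; // assumption; see ClientSSKBlock for the real value

        public static void main(String[] args) {
            int total = 2 + SIG_R_LENGTH + SIG_S_LENGTH + 2
                    + E_H_DOCNAME_LENGTH + DATA_DECRYPT_KEY_LENGTH + 2 + 2;
            System.out.println("TOTAL_HEADERS_LENGTH = " + total); // 136 under these assumptions
        }
    }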
Modified: trunk/freenet/src/freenet/node/Node.java
===================================================================
--- trunk/freenet/src/freenet/node/Node.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/node/Node.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -27,6 +27,7 @@
import freenet.client.ArchiveManager;
import freenet.client.HighLevelSimpleClient;
import freenet.client.HighLevelSimpleClientImpl;
+import freenet.crypt.DSAPublicKey;
import freenet.crypt.DiffieHellman;
import freenet.crypt.RandomSource;
import freenet.crypt.Yarrow;
@@ -46,14 +47,15 @@
import freenet.keys.ClientCHKBlock;
import freenet.keys.ClientKey;
import freenet.keys.ClientKeyBlock;
-import freenet.keys.KeyBlock;
import freenet.keys.NodeCHK;
-import freenet.store.BaseFreenetStore;
+import freenet.keys.SSKBlock;
import freenet.store.BerkeleyDBFreenetStore;
import freenet.store.FreenetStore;
import freenet.support.BucketFactory;
import freenet.support.FileLoggerHook;
import freenet.support.HexUtil;
+import freenet.support.ImmutableByteArrayWrapper;
+import freenet.support.LRUHashtable;
import freenet.support.LRUQueue;
import freenet.support.Logger;
import freenet.support.PaddedEphemerallyEncryptedBucketFactory;
@@ -118,8 +120,10 @@
final int portNumber;
/** These 3 are private because must be protected by synchronized(this) */
- /** The datastore */
- private final FreenetStore datastore;
+ /** The CHK datastore */
+ private final FreenetStore chkDatastore;
+ /** The SSK datastore */
+ private final FreenetStore sskDatastore;
/** RequestSender's currently running, by KeyHTLPair */
private final HashMap requestSenders;
/** RequestSender's currently transferring, by key */
@@ -148,6 +152,8 @@
final String filenamesPrefix;
final FilenameGenerator tempFilenameGenerator;
final FileLoggerHook fileLoggerHook;
+ static final int MAX_CACHED_KEYS = 1000;
+ final LRUHashtable cachedPubKeys;
final boolean testnetEnabled;
final int testnetPort;
static short MAX_HTL = 10;
@@ -309,7 +315,7 @@
}
}
DiffieHellman.init(yarrow);
- Node n = new Node(port, yarrow, overrideIP, "", 1000 / packetsPerSecond, true, logger);
+ Node n = new Node(port, yarrow, overrideIP, "", 1000 / packetsPerSecond, true, logger, 16384);
n.start(new StaticSwapRequestInterval(2000));
new TextModeClientInterface(n);
Thread t = new Thread(new MemoryChecker(), "Memory checker");
@@ -319,8 +325,9 @@
// FIXME - the whole overrideIP thing is a hack to avoid config
// Implement the config!
- Node(int port, RandomSource rand, InetAddress overrideIP, String prefix, int throttleInterval, boolean enableTestnet, FileLoggerHook logger) {
+ Node(int port, RandomSource rand, InetAddress overrideIP, String prefix, int throttleInterval, boolean enableTestnet, FileLoggerHook logger, int maxStoreKeys) {
this.fileLoggerHook = logger;
+ cachedPubKeys = new LRUHashtable();
if(enableTestnet) {
Logger.error(this, "WARNING: ENABLING TESTNET CODE! This may seriously jeopardize your anonymity!");
testnetEnabled = true;
@@ -343,7 +350,8 @@
downloadDir = new File("downloads");
downloadDir.mkdir();
try {
- datastore = new BerkeleyDBFreenetStore(prefix+"store-"+portNumber, 32768); // 1GB
+ chkDatastore = new BerkeleyDBFreenetStore(prefix+"store-"+portNumber, maxStoreKeys, 32768, CHKBlock.TOTAL_HEADERS_LENGTH);
+ sskDatastore = new BerkeleyDBFreenetStore(prefix+"sskstore-"+portNumber, maxStoreKeys, 1024, SSKBlock.TOTAL_HEADERS_LENGTH);
} catch (FileNotFoundException e1) {
Logger.error(this, "Could not open datastore: "+e1, e1);
System.err.println("Could not open datastore: "+e1);
@@ -559,7 +567,7 @@
synchronized(this) {
if(cache) {
try {
- datastore.put(block);
+ chkDatastore.put(block);
} catch (IOException e) {
Logger.error(this, "Datastore failure: "+e, e);
}
@@ -749,7 +757,7 @@
// In store?
CHKBlock chk = null;
try {
- chk = datastore.fetch(key, !cache);
+ chk = chkDatastore.fetch(key, !cache);
} catch (IOException e) {
Logger.error(this, "Error accessing store: "+e, e);
}
@@ -828,7 +836,7 @@
*/
public synchronized void store(CHKBlock block) {
try {
- datastore.put(block);
+ chkDatastore.put(block);
} catch (IOException e) {
Logger.error(this, "Cannot store data: "+e, e);
}
@@ -1067,4 +1075,32 @@
if(newIP.equals(lastIP)) return;
writeNodeFile();
}
+
+ /**
+ * Look up a cached public key by its hash.
+ */
+ public DSAPublicKey getKey(byte[] hash) {
+ ImmutableByteArrayWrapper w = new ImmutableByteArrayWrapper(hash);
+ synchronized(cachedPubKeys) {
+ DSAPublicKey key = (DSAPublicKey) cachedPubKeys.get(w);
+ if(key != null)
+ cachedPubKeys.push(w, key);
+ return key;
+ }
+ }
+
+ /**
+ * Cache a public key
+ */
+ public void cacheKey(byte[] hash, DSAPublicKey key) {
+ ImmutableByteArrayWrapper w = new ImmutableByteArrayWrapper(hash);
+ synchronized(cachedPubKeys) {
+ DSAPublicKey key2 = (DSAPublicKey) cachedPubKeys.get(w);
+ if(key2 != null && !key2.equals(key))
+ throw new IllegalArgumentException("Wrong hash?? Already have different key with same hash!");
+ cachedPubKeys.push(w, key);
+ while(cachedPubKeys.size() > MAX_CACHED_KEYS)
+ cachedPubKeys.popKey();
+ }
+ }
}
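For context, getKey()/cacheKey() above implement a bounded LRU cache of DSA public keys keyed by their hash, built on freenet.support.LRUHashtable and ImmutableByteArrayWrapper. A standalone sketch of the same pattern using only the JDK (LinkedHashMap in access order standing in for LRUHashtable, ByteBuffer.wrap() for ImmutableByteArrayWrapper, and a PubKey type parameter for DSAPublicKey); the wrapped byte arrays must not be modified after insertion:

    import java.nio.ByteBuffer;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PubKeyCacheSketch<PubKey> {
        static final int MAX_CACHED_KEYS = 1000; // same bound as Node.MAX_CACHED_KEYS above

        // Access-ordered LinkedHashMap evicts the least-recently-used entry once the
        // cache exceeds MAX_CACHED_KEYS, mirroring LRUHashtable.push()/popKey().
        private final Map<ByteBuffer, PubKey> cache =
                new LinkedHashMap<ByteBuffer, PubKey>(16, 0.75f, true) {
                    @Override
                    protected boolean removeEldestEntry(Map.Entry<ByteBuffer, PubKey> eldest) {
                        return size() > MAX_CACHED_KEYS;
                    }
                };

        public synchronized PubKey getKey(byte[] hash) {
            // get() on an access-ordered map also refreshes the entry's LRU position.
            return cache.get(ByteBuffer.wrap(hash));
        }

        public synchronized void cacheKey(byte[] hash, PubKey key) {
            ByteBuffer w = ByteBuffer.wrap(hash);
            PubKey old = cache.get(w);
            if (old != null && !old.equals(key))
                throw new IllegalArgumentException("Different key already cached for this hash");
            cache.put(w, key);
        }
    }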
Modified: trunk/freenet/src/freenet/node/RealNodePingTest.java
===================================================================
--- trunk/freenet/src/freenet/node/RealNodePingTest.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/node/RealNodePingTest.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -25,8 +25,8 @@
Yarrow yarrow = new Yarrow();
DiffieHellman.init(yarrow);
// Create 2 nodes
- Node node1 = new Node(5001, yarrow, null, "pingtest-", 0, false, fh);
- Node node2 = new Node(5002, yarrow, null, "pingtest-", 0, false, fh);
+ Node node1 = new Node(5001, yarrow, null, "pingtest-", 0, false, fh, 0);
+ Node node2 = new Node(5002, yarrow, null, "pingtest-", 0, false, fh, 0);
SimpleFieldSet node1ref = node1.exportFieldSet();
SimpleFieldSet node2ref = node2.exportFieldSet();
// Connect
Modified: trunk/freenet/src/freenet/node/RealNodeRequestInsertTest.java
===================================================================
--- trunk/freenet/src/freenet/node/RealNodeRequestInsertTest.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/node/RealNodeRequestInsertTest.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -42,7 +42,7 @@
Node[] nodes = new Node[NUMBER_OF_NODES];
Logger.normal(RealNodeRoutingTest.class, "Creating nodes...");
for(int i=0;i<NUMBER_OF_NODES;i++) {
- nodes[i] = new Node(5000+i, random, null, wd+File.separator, 0, false, fh);
+ nodes[i] = new Node(5000+i, random, null, wd+File.separator, 0, false, fh, 100);
nodes[i].usm.setDropProbability(20); // 5%
Logger.normal(RealNodeRoutingTest.class, "Created node "+i);
}
Modified: trunk/freenet/src/freenet/node/RealNodeRoutingTest.java
===================================================================
--- trunk/freenet/src/freenet/node/RealNodeRoutingTest.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/node/RealNodeRoutingTest.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -36,7 +36,7 @@
Node[] nodes = new Node[NUMBER_OF_NODES];
Logger.normal(RealNodeRoutingTest.class, "Creating nodes...");
for(int i=0;i<NUMBER_OF_NODES;i++) {
- nodes[i] = new Node(5000+i, random, null, wd+File.separator, 0, false, fh);
+ nodes[i] = new Node(5000+i, random, null, wd+File.separator, 0, false, fh, 0);
Logger.normal(RealNodeRoutingTest.class, "Created node "+i);
}
SimpleFieldSet refs[] = new SimpleFieldSet[NUMBER_OF_NODES];
Modified: trunk/freenet/src/freenet/node/Version.java
===================================================================
--- trunk/freenet/src/freenet/node/Version.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/node/Version.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -20,7 +20,7 @@
public static final String protocolVersion = "1.0";
/** The build number of the current revision */
- public static final int buildNumber = 322;
+ public static final int buildNumber = 323;
/** Oldest build of Fred we will talk to */
public static final int lastGoodBuild = 318;
Modified: trunk/freenet/src/freenet/store/BerkeleyDBFreenetStore.java
===================================================================
--- trunk/freenet/src/freenet/store/BerkeleyDBFreenetStore.java 2006-01-06 18:35:38 UTC (rev 7768)
+++ trunk/freenet/src/freenet/store/BerkeleyDBFreenetStore.java 2006-01-06 19:26:42 UTC (rev 7769)
@@ -39,8 +39,8 @@
*/
public class BerkeleyDBFreenetStore implements FreenetStore {
- static final int CHK_DATA_BLOCK_SIZE = 32*1024;
- static final int CHK_HEADER_BLOCK_SIZE = 36;
+ final int dataBlockSize;
+ final int headerBlockSize;
private final Environment environment;
private final TupleBinding storeBlockTupleBinding;
@@ -61,8 +61,9 @@
* @param the directory where the store is located
* @throws FileNotFoundException if the dir does not exist and could not be created
*/
- public BerkeleyDBFreenetStore(String storeDir,int maxChkBlocks) throws Exception
- {
+ public BerkeleyDBFreenetStore(String storeDir, int maxChkBlocks, int blockSize, int headerSize) throws Exception {
+ this.dataBlockSize = blockSize;
+ this.headerBlockSize = headerSize;
// Percentage of the database that must contain usefull data
// decrease to increase performance, increase to save disk space
System.setProperty("je.cleaner.minUtilization","98");
@@ -150,10 +151,10 @@
CHKBlock block = null;
try{
- byte[] header = new byte[CHK_HEADER_BLOCK_SIZE];
- byte[] data = new byte[CHK_DATA_BLOCK_SIZE];
+ byte[] header = new byte[headerBlockSize];
+ byte[] data = new byte[dataBlockSize];
synchronized(chkStore) {
- chkStore.seek(storeBlock.offset*(long)(CHK_DATA_BLOCK_SIZE+CHK_HEADER_BLOCK_SIZE));
+ chkStore.seek(storeBlock.offset*(long)(dataBlockSize+headerBlockSize));
chkStore.read(header);
chkStore.read(data);
}
@@ -213,12 +214,12 @@
byte[] data = block.getData();
byte[] header = block.getHeader();
- if(data.length!=CHK_DATA_BLOCK_SIZE) {
- Logger.minor(this, "This data is "+data.length+" bytes. Should be "+CHK_DATA_BLOCK_SIZE);
+ if(data.length!=dataBlockSize) {
+ Logger.minor(this, "This data is "+data.length+" bytes. Should be "+dataBlockSize);
return;
}
- if(header.length!=CHK_HEADER_BLOCK_SIZE) {
- Logger.minor(this, "This header is "+data.length+" bytes. Should be "+CHK_HEADER_BLOCK_SIZE);
+ if(header.length!=headerBlockSize) {
+ Logger.minor(this, "This header is "+data.length+" bytes. Should be "+headerBlockSize);
return;
}
@@ -231,7 +232,7 @@
synchronized(chkStore) {
if(chkBlocksInStore<maxChkBlocks) {
// Expand the store file
- int byteOffset = chkBlocksInStore*(CHK_DATA_BLOCK_SIZE+CHK_HEADER_BLOCK_SIZE);
+ int byteOffset = chkBlocksInStore*(dataBlockSize+headerBlockSize);
StoreBlock storeBlock = new StoreBlock(chkBlocksInStore);
DatabaseEntry blockDBE = new DatabaseEntry();
storeBlockTupleBinding.objectToEntry(storeBlock, blockDBE);
@@ -254,7 +255,7 @@
DatabaseEntry blockDBE = new DatabaseEntry();
storeBlockTupleBinding.objectToEntry(storeBlock, blockDBE);
chkDB.put(t,routingkeyDBE,blockDBE);
- chkStore.seek(storeBlock.getOffset()*(long)(CHK_DATA_BLOCK_SIZE+CHK_HEADER_BLOCK_SIZE));
+ chkStore.seek(storeBlock.getOffset()*(long)(dataBlockSize+headerBlockSize));
chkStore.write(header);
chkStore.write(data);
t.commit();
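For context, the store now computes slot offsets from its per-instance dataBlockSize and headerBlockSize rather than the old CHK_* constants, so one class can back both the 32 KiB CHK store and the 1 KiB SSK store created in Node above. A standalone sketch of that arithmetic; the CHK numbers come from the Node call sites in this commit, and the SSK header length of 136 assumes ClientSSKBlock.DATA_DECRYPT_KEY_LENGTH is 32:

    // Sketch of the slot-offset arithmetic used by BerkeleyDBFreenetStore's seek() calls.
    public class StoreSlotSketch {
        static long byteOffset(long blockOffset, int dataBlockSize, int headerBlockSize) {
            // Each slot holds one header followed by one data block; cast to long
            // before multiplying, as the seek() calls in the diff do.
            return blockOffset * (long) (dataBlockSize + headerBlockSize);
        }

        public static void main(String[] args) {
            System.out.println("CHK slot size: " + (32768 + 36) + " bytes, block 10 at offset "
                    + byteOffset(10, 32768, 36));
            System.out.println("SSK slot size: " + (1024 + 136) + " bytes, block 10 at offset "
                    + byteOffset(10, 1024, 136));
        }
    }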