Author: cutting
Date: Mon Dec 18 12:58:52 2006
New Revision: 488415

URL: http://svn.apache.org/viewvc?view=rev&rev=488415
Log:
HADOOP-829.  Within HDFS, clearly separate three different representations
for datanodes.  Contributed by Dhruba.
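For orientation, a minimal standalone sketch of how the separated representations relate. The classes below are simplified stand-ins written for this note, not the actual Hadoop sources; only the names, fields, and nesting mirror the patch.

    // Illustrative stand-ins only: how the three datanode representations
    // (plus the new persistence wrapper) relate after HADOOP-829.
    class DatanodeID {                        // identity, used as the RPC base
      String name;                            // host:port
      String storageID;
      int infoPort;
    }

    class DatanodeInfo extends DatanodeID {   // status, sent over the Client
      long capacity;                          // and Datanode protocols
      long remaining;
      long lastUpdate;
      int xceiverCount;
    }

    class DatanodeDescriptor extends DatanodeInfo {
      // namenode-internal: also tracks the blocks stored on the datanode;
      // never sent over the wire and never written to the fsImage
    }

    class DatanodeImage {                     // persistence: what FSImage writes
      DatanodeDescriptor node;                // wraps a descriptor for serialization
    }

In short: DatanodeID carries identity only, DatanodeInfo adds the usage figures exchanged over RPC, DatanodeDescriptor stays inside the namenode, and DatanodeImage (added below in FSImage.java) is the wrapper that gets persisted instead of the descriptor itself.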
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeDescriptor.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeID.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeInfo.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=488415&r1=488414&r2=488415
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Dec 18 12:58:52 2006
@@ -114,6 +114,11 @@
 32. HADOOP-811.  Add a utility, MultithreadedMapRunner.
     (Alejandro Abdelnur via cutting)
 
+33. HADOOP-829.  Within HDFS, clearly separate three different
+    representations for datanodes: one for RPCs, one for
+    namenode-internal use, and one for namespace persistence.
+    (Dhruba Borthakur via cutting)
+
 Release 0.9.2 - 2006-12-15

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeDescriptor.java?view=diff&rev=488415&r1=488414&r2=488415
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeDescriptor.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeDescriptor.java Mon Dec 18 12:58:52 2006
@@ -24,6 +24,11 @@
  * such as available storage capacity, last update time, etc.,
  * and maintains a set of blocks stored on the datanode.
  *
+ * This data structure is a data structure that is internal
+ * to the namenode. It is *not* sent over-the-wire to the Client
+ * or the Datnodes. Neither is it stored persistently in the
+ * fsImage.
+
  * @author Mike Cafarella
  * @author Konstantin Shvachko
 **************************************************/

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeID.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeID.java?view=diff&rev=488415&r1=488414&r2=488415
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeID.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeID.java Mon Dec 18 12:58:52 2006
@@ -68,7 +68,14 @@
   public int getInfoPort() {
     return infoPort;
   }
-
+
+  /**
+   * @sets data storage ID.
+   */
+  void setStorageID(String storageID) {
+    this.storageID = storageID;
+  }
+
   /**
    * @return hostname and no :portNumber.
    */

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeInfo.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeInfo.java?view=diff&rev=488415&r1=488414&r2=488415
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeInfo.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DatanodeInfo.java Mon Dec 18 12:58:52 2006
@@ -30,6 +30,8 @@
 
 /**
  * DatanodeInfo represents the status of a DataNode.
+ * This object is used for communication in the
+ * Datanode Protocol and the Client Protocol.
  *
  * @author Mike Cafarella
  * @author Konstantin Shvachko
@@ -71,6 +73,26 @@
 
   /** number of active connections */
   public int getXceiverCount() { return xceiverCount; }
+
+  /** Sets raw capacity. */
+  void setCapacity(long capacity) {
+    this.capacity = capacity;
+  }
+
+  /** Sets raw free space. */
+  void setRemaining(long remaining) {
+    this.remaining = remaining;
+  }
+
+  /** Sets time when this information was accurate. */
+  void setLastUpdate(long lastUpdate) {
+    this.lastUpdate = lastUpdate;
+  }
+
+  /** Sets number of active connections */
+  void setXceiverCount(int xceiverCount) {
+    this.xceiverCount = xceiverCount;
+  }
 
   /** A formatted string for reporting the status of the DataNode. */
   public String getDatanodeReport() {

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java?view=diff&rev=488415&r1=488414&r2=488415
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/FSImage.java Mon Dec 18 12:58:52 2006
@@ -25,6 +25,8 @@
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Random;
@@ -33,6 +35,12 @@
 import org.apache.hadoop.dfs.FSDirectory.INode;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableFactories;
+import org.apache.hadoop.io.WritableFactory;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.io.UTF8;
 
 /**
  * FSImage handles checkpointing and logging of the namespace edits.
@@ -318,8 +326,10 @@
     Map datanodeMap = FSNamesystem.getFSNamesystem().datanodeMap;
     int size = datanodeMap.size();
     out.writeInt( size );
-    for( Iterator it = datanodeMap.values().iterator(); it.hasNext(); )
-      ((DatanodeDescriptor)it.next()).write( out );
+    for( Iterator it = datanodeMap.values().iterator(); it.hasNext(); ) {
+      DatanodeImage nodeImage = new DatanodeImage((DatanodeDescriptor) it.next());
+      nodeImage.write( out );
+    }
   }
 
   void loadDatanodes( int version, DataInputStream in ) throws IOException {
@@ -328,9 +338,75 @@
     FSNamesystem fsNamesys = FSNamesystem.getFSNamesystem();
     int size = in.readInt();
     for( int i = 0; i < size; i++ ) {
-      DatanodeDescriptor node = new DatanodeDescriptor();
-      node.readFields(in);
-      fsNamesys.unprotectedAddDatanode( node );
+      DatanodeImage nodeImage = new DatanodeImage();
+      nodeImage.readFields(in);
+      fsNamesys.unprotectedAddDatanode(nodeImage.getDatanodeDescriptor());
+    }
+  }
+
+  class DatanodeImage implements WritableComparable {
+
+    /**************************************************
+     * DatanodeImage is used to store persistent information
+     * about datanodes into the fsImage.
+     **************************************************/
+    DatanodeDescriptor node;
+
+    DatanodeImage() {
+      node = new DatanodeDescriptor();
+    }
+
+    DatanodeImage(DatanodeDescriptor from) {
+      node = from;
+    }
+
+    /**
+     * Returns the underlying Datanode Descriptor
+     */
+    DatanodeDescriptor getDatanodeDescriptor() {
+      return node;
+    }
+
+    public int compareTo(Object o) {
+      return node.compareTo(o);
+    }
+
+    /////////////////////////////////////////////////
+    // Writable
+    /////////////////////////////////////////////////
+    /**
+     * Public method that serializes the information about a
+     * Datanode to be stored in the fsImage.
+     */
+    public void write(DataOutput out) throws IOException {
+      DatanodeID id = new DatanodeID(node.getName(), node.getStorageID(),
+                                     node.getInfoPort());
+      id.write(out);
+      out.writeLong(node.getCapacity());
+      out.writeLong(node.getRemaining());
+      out.writeLong(node.getLastUpdate());
+      out.writeInt(node.getXceiverCount());
+    }
+
+    /**
+     * Public method that reads a serialized Datanode
+     * from the fsImage.
+     */
+    public void readFields(DataInput in) throws IOException {
+      DatanodeID id = new DatanodeID();
+      id.readFields(in);
+      long capacity = in.readLong();
+      long remaining = in.readLong();
+      long lastUpdate = in.readLong();
+      int xceiverCount = in.readInt();
+
+      // update the DatanodeDescriptor with the data we read in
+      node.updateRegInfo(id);
+      node.setStorageID(id.getStorageID());
+      node.setCapacity(capacity);
+      node.setRemaining(remaining);
+      node.setLastUpdate(lastUpdate);
+      node.setXceiverCount(xceiverCount);
     }
   }
 }
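As a rough, self-contained illustration of the per-datanode record layout that DatanodeImage.write() produces in the fsImage (and readFields() consumes), here is a small round-trip sketch. The DatanodeID portion is stubbed out with three placeholder fields and hypothetical values; its real Writable format is defined in DatanodeID.java and is not reproduced exactly.

    import java.io.*;

    public class DatanodeImageLayoutSketch {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);

        // stand-in for id.write(out): name, storageID, infoPort
        out.writeUTF("host1.example.com:50010");   // hypothetical name
        out.writeUTF("DS-123456");                 // hypothetical storageID
        out.writeInt(50075);                       // hypothetical infoPort

        // the four status fields appended by DatanodeImage.write(), in order
        out.writeLong(1000L);                      // capacity
        out.writeLong(800L);                       // remaining
        out.writeLong(0L);                         // lastUpdate
        out.writeInt(0);                           // xceiverCount

        // readFields() consumes the same fields in the same order
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(buf.toByteArray()));
        in.readUTF(); in.readUTF(); in.readInt();  // DatanodeID stand-in
        long capacity = in.readLong();
        long remaining = in.readLong();
        long lastUpdate = in.readLong();
        int xceiverCount = in.readInt();
        System.out.println(capacity + " " + remaining + " "
                           + lastUpdate + " " + xceiverCount);
      }
    }

The point of the wrapper is that only these identity and status fields reach the disk; the block list held by DatanodeDescriptor stays in memory in the namenode.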