Author: arp
Date: Fri Feb 14 20:56:24 2014
New Revision: 1568517

URL: http://svn.apache.org/r1568517
Log:
HDFS-5494. Merge Protobuf-based-FSImage code from trunk - fix build break after merge. (Contributed by Jing Zhao)
Modified:
    hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt
    hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
    hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java
    hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyConsiderLoad.java

Modified: hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt?rev=1568517&r1=1568516&r2=1568517&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt Fri Feb 14 20:56:24 2014
@@ -36,3 +36,6 @@ HDFS-5535 subtasks:
 
     HDFS-5535. Add BlockPoolSliceStorage 'trash' to handle block deletions
     during rolling upgrades. (Arpit Agarwal)
+    HDFS-5494. Merge Protobuf-based-FSImage code from trunk - fix build
+    break after the merge. (Jing Zhao via Arpit Agarwal)
+

Modified: hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java?rev=1568517&r1=1568516&r2=1568517&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java Fri Feb 14 20:56:24 2014
@@ -428,7 +428,7 @@ public final class FSImageFormatProtobuf
 
       FileSummary.Builder b = FileSummary.newBuilder()
           .setOndiskVersion(FSImageUtil.FILE_VERSION)
-          .setLayoutVersion(LayoutVersion.getCurrentLayoutVersion());
+          .setLayoutVersion(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
 
       codec = compression.getImageCodec();
       if (codec != null) {

Modified: hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java?rev=1568517&r1=1568516&r2=1568517&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageUtil.java Fri Feb 14 20:56:24 2014
@@ -71,7 +71,7 @@ public final class FSImageUtil {
           + summary.getOndiskVersion());
     }
 
-    if (!LayoutVersion.supports(Feature.PROTOBUF_FORMAT,
+    if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT,
         summary.getLayoutVersion())) {
       throw new IOException("Unsupported layout version "
           + summary.getLayoutVersion());

Modified: hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyConsiderLoad.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyConsiderLoad.java?rev=1568517&r1=1568516&r2=1568517&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyConsiderLoad.java (original)
+++ hadoop/common/branches/HDFS-5535/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicyConsiderLoad.java Fri Feb 14 20:56:24 2014
@@ -27,6 +27,7 @@ import org.apache.hadoop.hdfs.protocol.D
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
+import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.test.PathUtils;
@@ -87,7 +88,8 @@ public class TestReplicationPolicyConsid
     // Register DNs
     for (int i=0; i < 6; i++) {
       DatanodeRegistration dnr = new DatanodeRegistration(dataNodes[i],
-          new StorageInfo(), new ExportedBlockKeys(), VersionInfo.getVersion());
+          new StorageInfo(NodeType.DATA_NODE), new ExportedBlockKeys(),
+          VersionInfo.getVersion());
       dnrList.add(dnr);
       dnManager.registerDatanode(dnr);
       dataNodes[i].getStorageInfos()[0].setUtilizationForTesting(
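
Note on the fix (not part of the commit): all three hunks follow the same pattern. On the HDFS-5535 rolling-upgrade branch, layout-version lookups go through the NameNode-specific NameNodeLayoutVersion rather than the shared LayoutVersion helper, and StorageInfo is constructed with an explicit NodeType, which is why the protobuf-FSImage code freshly merged from trunk no longer compiled. The sketch below only consolidates, for reference, the calls visible in the hunks above; it is not taken from the commit, and the imports marked "assumed" are inferred from the file paths rather than shown in the diff.

    // Illustrative sketch only: consolidates the post-commit call patterns
    // from the hunks above. Imports marked "assumed" are inferred, not shown
    // in the diff; the class name is hypothetical.
    import java.io.IOException;

    import org.apache.hadoop.hdfs.protocol.LayoutVersion.Feature;             // assumed
    import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
    import org.apache.hadoop.hdfs.server.common.StorageInfo;
    import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;      // assumed

    public class LayoutVersionUsageSketch {

      // FSImageFormatProtobuf.java: the FileSummary is now stamped with the
      // NameNode-specific layout version constant.
      public static int currentLayoutVersion() {
        return NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION;
      }

      // FSImageUtil.java: the protobuf-format capability check also routes
      // through NameNodeLayoutVersion.
      public static void checkProtobufSupport(int layoutVersion) throws IOException {
        if (!NameNodeLayoutVersion.supports(Feature.PROTOBUF_FORMAT, layoutVersion)) {
          throw new IOException("Unsupported layout version " + layoutVersion);
        }
      }

      // TestReplicationPolicyConsiderLoad.java: StorageInfo now needs to know
      // which node type it describes, so DataNode registrations pass
      // NodeType.DATA_NODE explicitly.
      public static StorageInfo dataNodeStorageInfo() {
        return new StorageInfo(NodeType.DATA_NODE);
      }
    }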