Author: szetszwo
Date: Wed Sep 11 06:30:01 2013
New Revision: 1521743

URL: http://svn.apache.org/r1521743
Log: HDFS-5157. Add StorageType to FsVolume.  Contributed by Junping Du
Modified:
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java
    hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-2832.txt Wed Sep 11 06:30:01 2013
@@ -21,3 +21,5 @@ IMPROVEMENTS:
     HDFS-5134. Move blockContentsStale, heartbeatedSinceFailover and
     firstBlockReport from DatanodeDescriptor to DatanodeStorageInfo; and fix a
     synchronization problem in DatanodeStorageInfo.  (szetszwo)
+
+    HDFS-5157. Add StorageType to FsVolume.  (Junping Du via szetszwo)

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Wed Sep 11 06:30:01 2013
@@ -52,7 +52,6 @@ import java.io.BufferedOutputStream;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
-import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -62,7 +61,6 @@ import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.SocketException;
 import java.net.SocketTimeoutException;
-import java.net.URI;
 import java.net.UnknownHostException;
 import java.nio.channels.ClosedByInterruptException;
 import java.nio.channels.SocketChannel;
@@ -122,7 +120,6 @@ import org.apache.hadoop.hdfs.server.com
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
-import org.apache.hadoop.hdfs.server.common.Util;
 import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
@@ -160,7 +157,6 @@ import org.apache.hadoop.util.GenericOpt
 import org.apache.hadoop.util.JvmPauseMonitor;
 import org.apache.hadoop.util.ServicePlugin;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.VersionInfo;
 import org.mortbay.util.ajax.JSON;
@@ -1709,7 +1705,8 @@ public class DataNode extends Configured
     return locations;
   }
 
-  static Collection<StorageLocation> getStorageLocations(Configuration conf) {
+  public static Collection<StorageLocation> getStorageLocations(
+      Configuration conf) {
     return parseStorageLocations(
         conf.getTrimmedStringCollection(DFS_DATANODE_DATA_DIR_KEY));
   }
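
Note: getStorageLocations() is widened to public above so that FsDatasetImpl (further down in this change) can recover the configured StorageLocation, and with it the storage type, of each data directory. As a rough, hypothetical illustration only (the class and main() below are invented for this sketch; the accessors are the ones the patch itself relies on):

import java.util.Collection;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.StorageLocation;

// Hypothetical diagnostic, not part of this patch: print each configured data
// directory alongside the storage type its StorageLocation reports.
public class PrintStorageLocations {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration(); // loads dfs.datanode.data.dir
    Collection<StorageLocation> locations = DataNode.getStorageLocations(conf);
    for (StorageLocation location : locations) {
      System.out.println(location.getFile() + " -> " + location.getStorageType());
    }
  }
}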
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java Wed Sep 11 06:30:01 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hdfs.server.da
 import java.io.File;
 import java.io.IOException;
 
+import org.apache.hadoop.hdfs.StorageType;
+
 /**
  * This is an interface for the underlying volume.
  */
@@ -38,4 +40,6 @@ public interface FsVolumeSpi {
 
   /** @return the directory for the finalized blocks in the block pool. */
   public File getFinalizedDir(String bpid) throws IOException;
+
+  public StorageType getStorageType();
 }
\ No newline at end of file
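
Note: the single new method above is the whole interface change; each volume now reports one StorageType. Purely as an illustration of how a caller might use it (this helper does not exist in the patch), volumes could be bucketed by the type they report:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.hdfs.StorageType;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;

// Hypothetical helper, not part of this patch: group volumes by the
// StorageType each one reports through the new FsVolumeSpi#getStorageType().
class VolumesByType {
  static Map<StorageType, List<FsVolumeSpi>> group(List<? extends FsVolumeSpi> volumes) {
    Map<StorageType, List<FsVolumeSpi>> byType =
        new HashMap<StorageType, List<FsVolumeSpi>>();
    for (FsVolumeSpi volume : volumes) {
      List<FsVolumeSpi> sameType = byType.get(volume.getStorageType());
      if (sameType == null) {
        sameType = new ArrayList<FsVolumeSpi>();
        byType.put(volume.getStorageType(), sameType);
      }
      sameType.add(volume);
    }
    return byType;
  }
}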
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java Wed Sep 11 06:30:01 2013
@@ -44,6 +44,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.protocol.BlockLocalPathInfo;
@@ -65,6 +66,7 @@ import org.apache.hadoop.hdfs.server.dat
 import org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaUnderRecovery;
 import org.apache.hadoop.hdfs.server.datanode.ReplicaWaitingToBeRecovered;
+import org.apache.hadoop.hdfs.server.datanode.StorageLocation;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.LengthInputStream;
@@ -188,6 +190,7 @@ class FsDatasetImpl implements FsDataset
         DFSConfigKeys.DFS_DATANODE_FAILED_VOLUMES_TOLERATED_DEFAULT);
     String[] dataDirs = conf.getTrimmedStrings(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY);
+    Collection<StorageLocation> dataLocations = DataNode.getStorageLocations(conf);
 
     int volsConfigured = (dataDirs == null) ? 0 : dataDirs.length;
     int volsFailed = volsConfigured - storage.getNumStorageDirs();
@@ -209,8 +212,13 @@ class FsDatasetImpl implements FsDataset
         storage.getNumStorageDirs());
     for (int idx = 0; idx < storage.getNumStorageDirs(); idx++) {
       final File dir = storage.getStorageDir(idx).getCurrentDir();
-      volArray.add(new FsVolumeImpl(this, storage.getStorageID(), dir, conf));
-      LOG.info("Added volume - " + dir);
+      // TODO: getStorageTypeFromLocations() is only a temporary workaround and
+      // should be replaced with getting storage type from DataStorage (missing
+      // storage type now) directly.
+      final StorageType storageType = getStorageTypeFromLocations(dataLocations, dir);
+      volArray.add(new FsVolumeImpl(this, storage.getStorageID(), dir, conf,
+          storageType));
+      LOG.info("Added volume - " + dir + ", StorageType: " + storageType);
     }
 
     volumeMap = new ReplicaMap(this);
@@ -231,6 +239,16 @@ class FsDatasetImpl implements FsDataset
     registerMBean(storage.getStorageID());
   }
 
+  private StorageType getStorageTypeFromLocations(
+      Collection<StorageLocation> dataLocations, File dir) {
+    for (StorageLocation dataLocation : dataLocations) {
+      if (dataLocation.getFile().equals(dir)) {
+        return dataLocation.getStorageType();
+      }
+    }
+    return StorageType.DEFAULT;
+  }
+
   /**
    * Return the total space used by dfs datanode
    */

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeImpl.java Wed Sep 11 06:30:01 2013
@@ -29,6 +29,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.DF;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.datanode.DataStorage;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
@@ -43,6 +44,7 @@ import org.apache.hadoop.util.DiskChecke
 class FsVolumeImpl implements FsVolumeSpi {
   private final FsDatasetImpl dataset;
   private final String storageID;
+  private final StorageType storageType;
   private final Map<String, BlockPoolSlice> bpSlices
       = new HashMap<String, BlockPoolSlice>();
   private final File currentDir;    // <StorageDirectory>/current
@@ -50,7 +52,7 @@ class FsVolumeImpl implements FsVolumeSp
   private final long reserved;
 
   FsVolumeImpl(FsDatasetImpl dataset, String storageID, File currentDir,
-      Configuration conf) throws IOException {
+      Configuration conf, StorageType storageType) throws IOException {
     this.dataset = dataset;
     this.storageID = storageID;
     this.reserved = conf.getLong(
@@ -59,6 +61,7 @@ class FsVolumeImpl implements FsVolumeSp
     this.currentDir = currentDir;
     File parent = currentDir.getParentFile();
     this.usage = new DF(parent, conf);
+    this.storageType = storageType;
   }
 
   File getCurrentDir() {
@@ -290,4 +293,9 @@ class FsVolumeImpl implements FsVolumeSp
   String getStorageID() {
     return storageID;
   }
+
+  @Override
+  public StorageType getStorageType() {
+    return storageType;
+  }
 }

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsVolumeList.java Wed Sep 11 06:30:01 2013
@@ -56,6 +56,7 @@ class FsVolumeList {
    * @param blockSize free space needed on the volume
    * @return next volume to store the block in.
    */
+  // TODO this will be replaced by getting volume from StorageID directly later.
   synchronized FsVolumeImpl getNextVolume(long blockSize) throws IOException {
     return blockChooser.chooseVolume(volumes, blockSize);
   }
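
Note: the TODO above points at volumes eventually being resolved by storage ID instead of chosen round-robin. In this patch every FsVolumeImpl is still constructed with the DataNode-wide storage.getStorageID(), so the following is only a hypothetical sketch of that future direction (the helper class is invented and assumes per-volume storage IDs, which do not exist yet):

package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;

import java.util.List;

// Hypothetical sketch, not part of this patch: resolve a volume by its storage
// ID. Placed in this package because FsVolumeImpl and getStorageID() are
// package-private.
class VolumeByStorageId {
  /** @return the volume whose storage ID matches, or null if none does. */
  static FsVolumeImpl find(List<FsVolumeImpl> volumes, String storageID) {
    for (FsVolumeImpl volume : volumes) {
      if (storageID.equals(volume.getStorageID())) {
        return volume;
      }
    }
    return null;
  }
}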

Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java?rev=1521743&r1=1521742&r2=1521743&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java (original)
+++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java Wed Sep 11 06:30:01 2013
@@ -40,6 +40,7 @@ import org.apache.hadoop.hdfs.DFSConfigK
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.StorageType;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
@@ -406,6 +407,11 @@ public class TestDirectoryScanner {
     public File getFinalizedDir(String bpid) throws IOException {
       return new File("/base/current/" + bpid + "/finalized");
     }
+
+    @Override
+    public StorageType getStorageType() {
+      return StorageType.DEFAULT;
+    }
   }
 
   private final static TestFsVolumeSpi TEST_VOLUME = new TestFsVolumeSpi();