See https://builds.apache.org/job/Hadoop-Hdfs-trunk/751/
###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 1499156 lines...]
[junit] 2011-08-15 22:17:08,057 INFO datanode.FSDatasetAsyncDiskService (FSDatasetAsyncDiskService.java:shutdown(142)) - All async disk service threads have been shut down.
[junit] 2011-08-15 22:17:08,057 INFO mortbay.log (Slf4jLog.java:info(67)) - Stopped SelectChannelConnector@localhost:0
[junit] 2011-08-15 22:17:08,158 WARN blockmanagement.DecommissionManager (DecommissionManager.java:run(75)) - Monitor interrupted: java.lang.InterruptedException: sleep interrupted
[junit] 2011-08-15 22:17:08,158 WARN blockmanagement.BlockManager (BlockManager.java:run(2614)) - ReplicationMonitor thread received InterruptedException.
[junit] java.lang.InterruptedException: sleep interrupted
[junit]     at java.lang.Thread.sleep(Native Method)
[junit]     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor.run(BlockManager.java:2612)
[junit]     at java.lang.Thread.run(Thread.java:662)
[junit] 2011-08-15 22:17:08,159 INFO namenode.FSEditLog (FSEditLog.java:endCurrentLogSegment(859)) - Ending log segment 1
[junit] 2011-08-15 22:17:08,170 INFO namenode.FSEditLog (FSEditLog.java:printStatistics(492)) - Number of transactions: 8 Total time for transactions(ms): 0Number of transactions batched in Syncs: 0 Number of syncs: 7 SyncTimes(ms): 94 83
[junit] 2011-08-15 22:17:08,171 INFO ipc.Server (Server.java:stop(1715)) - Stopping server on 42042
[junit] 2011-08-15 22:17:08,172 INFO ipc.Server (Server.java:run(1539)) - IPC Server handler 0 on 42042: exiting
[junit] 2011-08-15 22:17:08,172 INFO ipc.Server (Server.java:run(505)) - Stopping IPC Server listener on 42042
[junit] 2011-08-15 22:17:08,172 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stop(199)) - Stopping DataNode metrics system...
[junit] 2011-08-15 22:17:08,172 INFO ipc.Server (Server.java:run(647)) - Stopping IPC Server Responder
[junit] 2011-08-15 22:17:08,172 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics
[junit] 2011-08-15 22:17:08,173 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source NameNodeActivity
[junit] 2011-08-15 22:17:08,173 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort42042
[junit] 2011-08-15 22:17:08,173 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort42042
[junit] 2011-08-15 22:17:08,173 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source FSNamesystem
[junit] 2011-08-15 22:17:08,173 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort47877
[junit] 2011-08-15 22:17:08,174 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort47877
[junit] 2011-08-15 22:17:08,174 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-1
[junit] 2011-08-15 22:17:08,174 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-34652
[junit] 2011-08-15 22:17:08,174 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort55190
[junit] 2011-08-15 22:17:08,174 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort55190
[junit] 2011-08-15 22:17:08,175 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-2
[junit] 2011-08-15 22:17:08,175 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-57909
[junit] 2011-08-15 22:17:08,175 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort51148
[junit] 2011-08-15 22:17:08,175 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort51148
[junit] 2011-08-15 22:17:08,176 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-3
[junit] 2011-08-15 22:17:08,176 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-46009
[junit] 2011-08-15 22:17:08,176 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort60950
[junit] 2011-08-15 22:17:08,176 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort60950
[junit] 2011-08-15 22:17:08,176 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-4
[junit] 2011-08-15 22:17:08,177 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-35625
[junit] 2011-08-15 22:17:08,177 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stop(205)) - DataNode metrics system stopped.
[junit] 2011-08-15 22:17:08,177 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:shutdown(553)) - DataNode metrics system shutdown complete.
[junit] Tests run: 16, Failures: 0, Errors: 0, Time elapsed: 102.287 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build.xml:777: Tests failed!

Total time: 61 minutes 22 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Archiving artifacts
Recording fingerprints
Recording test results
Publishing Javadoc
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure

###################################################################################
############################## FAILED TESTS (if any) ##############################
15 tests failed.

REGRESSION: org.apache.hadoop.hdfs.server.datanode.TestDataDirs.testGetDataDirsFromURIs

Error Message:
org/apache/hadoop/fs/permission/FsPermission

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/permission/FsPermission
    at org.apache.hadoop.hdfs.server.datanode.TestDataDirs.testGetDataDirsFromURIs(TestDataDirs.java:42)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.fs.permission.FsPermission
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestGetImageServlet.initializationError

Error Message:
org/apache/hadoop/conf/Configuration

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration
    at java.lang.Class.getDeclaredMethods0(Native Method)
    at java.lang.Class.privateGetDeclaredMethods(Class.java:2427)
    at java.lang.Class.getDeclaredMethods(Class.java:1791)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.conf.Configuration
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testReplication

Error Message:
org/apache/hadoop/io/Writable

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testReplication(TestINodeFile.java:47)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testReplicationBelowLowerBound

Error Message:
Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>

Stack Trace:
java.lang.Exception: Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>
Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testReplicationBelowLowerBound(TestINodeFile.java:64)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSize

Error Message:
org/apache/hadoop/io/Writable

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSize(TestINodeFile.java:77)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeUpperBound

Error Message:
org/apache/hadoop/io/Writable

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeUpperBound(TestINodeFile.java:88)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeBelowLowerBound

Error Message:
Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>

Stack Trace:
java.lang.Exception: Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>
Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeBelowLowerBound(TestINodeFile.java:105)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeAboveUpperBound

Error Message:
Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>

Stack Trace:
java.lang.Exception: Unexpected exception, expected<java.lang.IllegalArgumentException> but was<java.lang.NoClassDefFoundError>
Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/io/Writable
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:615)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:283)
    at java.net.URLClassLoader.access$000(URLClassLoader.java:58)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:197)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testPreferredBlockSizeAboveUpperBound(TestINodeFile.java:120)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.io.Writable
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testGetFullPathName

Error Message:
org/apache/hadoop/fs/permission/PermissionStatus

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/permission/PermissionStatus
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testGetFullPathName(TestINodeFile.java:127)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.fs.permission.PermissionStatus
    at java.net.URLClassLoader$1.run(URLClassLoader.java:202)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:190)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:306)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:247)

REGRESSION: org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testAppendBlocks

Error Message:
org/apache/hadoop/fs/permission/PermissionStatus

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/permission/PermissionStatus
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.createINodeFiles(TestINodeFile.java:195)
    at org.apache.hadoop.hdfs.server.namenode.TestINodeFile.testAppendBlocks(TestINodeFile.java:157)

FAILED: org.apache.hadoop.hdfs.TestHDFSServerPorts.testSecondaryNodePorts

Error Message:
Directory /test/dfs/namesecondary is in an inconsistent state: checkpoint directory does not exist or is not accessible.

Stack Trace:
org.apache.hadoop.hdfs.server.common.InconsistentFSStateException: Directory /test/dfs/namesecondary is in an inconsistent state: checkpoint directory does not exist or is not accessible.
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.recoverCreate(SecondaryNameNode.java:801)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.initialize(SecondaryNameNode.java:222)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.<init>(SecondaryNameNode.java:175)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.<init>(SecondaryNameNode.java:168)
    at org.apache.hadoop.hdfs.TestHDFSServerPorts.canStartSecondaryNode(TestHDFSServerPorts.java:224)
    at org.apache.hadoop.hdfs.TestHDFSServerPorts.testSecondaryNodePorts(TestHDFSServerPorts.java:350)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSeparateEditsDirLocking

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:626)
    at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:541)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:257)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
    at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:243)
    at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSeparateEditsDirLocking(TestCheckpoint.java:560)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestNNThroughputBenchmark.testNNThroughput

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestNNThroughputBenchmark.testNNThroughput(TestNNThroughputBenchmark.java:39)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatMatchingRPCandHttpPortsThrowException

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatMatchingRPCandHttpPortsThrowException(TestValidateConfigurationSettings.java:49)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatDifferentRPCandHttpPortsAreOK

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatDifferentRPCandHttpPortsAreOK(TestValidateConfigurationSettings.java:71)