See https://builds.apache.org/job/Hadoop-Hdfs-trunk/2207/
###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 7544 lines...]
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [03:24 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [ 02:54 h]
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [ 0.061 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:58 h
[INFO] Finished at: 2015-08-07T14:33:22+00:00
[INFO] Final Memory: 66M/769M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Sending artifact delta relative to Hadoop-Hdfs-trunk #2199
Archived 1 artifacts
Archive block size is 32768
Received 0 blocks and 3861697 bytes
Compression is 0.0%
Took 13 sec
Recording test results
Updating MAPREDUCE-6257
Updating HDFS-8856
Updating HDFS-8499
Updating HDFS-8623
Updating MAPREDUCE-6443
Updating YARN-3974
Updating YARN-3948
Updating YARN-4019
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure


###################################################################################
############################## FAILED TESTS (if any) ##############################
16 tests failed.
REGRESSION:  org.apache.hadoop.fs.TestGlobPaths.testLocalFilesystem

Error Message:
null

Stack Trace:
java.lang.AssertionError: null
    at org.junit.Assert.fail(Assert.java:86)
    at org.junit.Assert.assertTrue(Assert.java:41)
    at org.junit.Assert.assertTrue(Assert.java:52)
    at org.apache.hadoop.fs.TestGlobPaths.testLocalFilesystem(TestGlobPaths.java:1309)


FAILED:  org.apache.hadoop.fs.TestSWebHdfsFileContextMainOperations.org.apache.hadoop.fs.TestSWebHdfsFileContextMainOperations

Error Message:
fs.AbstractFileSystem.swebhdfs.impl=null: No AbstractFileSystem configured for scheme: swebhdfs

Stack Trace:
org.apache.hadoop.fs.UnsupportedFileSystemException: fs.AbstractFileSystem.swebhdfs.impl=null: No AbstractFileSystem configured for scheme: swebhdfs
    at org.apache.hadoop.fs.AbstractFileSystem.createFileSystem(AbstractFileSystem.java:161)
    at org.apache.hadoop.fs.AbstractFileSystem.get(AbstractFileSystem.java:250)
    at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:325)
    at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:322)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1666)
    at org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:322)
    at org.apache.hadoop.fs.FileContext.getFileContext(FileContext.java:439)
    at org.apache.hadoop.fs.TestSWebHdfsFileContextMainOperations.clusterSetupAtBeginning(TestSWebHdfsFileContextMainOperations.java:89)


FAILED:  org.apache.hadoop.fs.TestWebHdfsFileContextMainOperations.org.apache.hadoop.fs.TestWebHdfsFileContextMainOperations

Error Message:
fs.AbstractFileSystem.webhdfs.impl=null: No AbstractFileSystem configured for scheme: webhdfs

Stack Trace:
org.apache.hadoop.fs.UnsupportedFileSystemException: fs.AbstractFileSystem.webhdfs.impl=null: No AbstractFileSystem configured for scheme: webhdfs
    at org.apache.hadoop.fs.AbstractFileSystem.createFileSystem(AbstractFileSystem.java:161)
    at org.apache.hadoop.fs.AbstractFileSystem.get(AbstractFileSystem.java:250)
    at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:325)
    at org.apache.hadoop.fs.FileContext$2.run(FileContext.java:322)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1666)
    at org.apache.hadoop.fs.FileContext.getAbstractFileSystem(FileContext.java:322)
    at org.apache.hadoop.fs.FileContext.getFileContext(FileContext.java:439)
    at org.apache.hadoop.fs.TestWebHdfsFileContextMainOperations.clusterSetupAtBeginning(TestWebHdfsFileContextMainOperations.java:76)


REGRESSION:  org.apache.hadoop.fs.contract.hdfs.TestHDFSContractAppend.testRenameFileBeingAppended

Error Message:
renamed destination file does not exist: not found hdfs://localhost:33026/test/test/renamed in hdfs://localhost:33026/test/test

Stack Trace:
java.io.FileNotFoundException: renamed destination file does not exist: not found hdfs://localhost:33026/test/test/renamed in hdfs://localhost:33026/test/test
    at org.apache.hadoop.fs.contract.ContractTestUtils.assertPathExists(ContractTestUtils.java:688)
    at org.apache.hadoop.fs.contract.AbstractFSContractTestBase.assertPathExists(AbstractFSContractTestBase.java:279)
    at org.apache.hadoop.fs.contract.AbstractContractAppendTest.testRenameFileBeingAppended(AbstractContractAppendTest.java:120)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)


REGRESSION:  org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.testContains

Error Message:
org/apache/hadoop/fs/StorageType

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/StorageType
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1181)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1186)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1100)
    at org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.setup(TestHost2NodesMap.java:36)


REGRESSION:  org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.testGetDatanodeByHost

Error Message:
org/apache/hadoop/fs/StorageType

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/StorageType
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1181)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1186)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1100)
    at org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.setup(TestHost2NodesMap.java:36)


REGRESSION:  org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.testRemove

Error Message:
org/apache/hadoop/fs/StorageType

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/fs/StorageType
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1181)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1186)
    at org.apache.hadoop.hdfs.DFSTestUtil.getDatanodeDescriptor(DFSTestUtil.java:1100)
    at org.apache.hadoop.hdfs.server.blockmanagement.TestHost2NodesMap.setup(TestHost2NodesMap.java:36)


REGRESSION:  org.apache.hadoop.hdfs.server.blockmanagement.TestNodeCount.testNodeCount

Error Message:
Timeout: excess replica count not equal to 2 for block blk_1073741825_1001 after 20000 msec. Last counts: live = 2, excess = 0, corrupt = 0

Stack Trace:
java.util.concurrent.TimeoutException: Timeout: excess replica count not equal to 2 for block blk_1073741825_1001 after 20000 msec. Last counts: live = 2, excess = 0, corrupt = 0
    at org.apache.hadoop.hdfs.server.blockmanagement.TestNodeCount.checkTimeout(TestNodeCount.java:152)
    at org.apache.hadoop.hdfs.server.blockmanagement.TestNodeCount.checkTimeout(TestNodeCount.java:146)
    at org.apache.hadoop.hdfs.server.blockmanagement.TestNodeCount.testNodeCount(TestNodeCount.java:130)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails

Error Message:
org/apache/hadoop/io/retry/MultiException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/retry/MultiException
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.server.namenode.ha.RequestHedgingProxyProvider.getProxy(RequestHedgingProxyProvider.java:173)
    at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails(TestRequestHedgingProxyProvider.java:76)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenBothFail

Error Message:
org/apache/hadoop/io/retry/MultiException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/retry/MultiException
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.server.namenode.ha.RequestHedgingProxyProvider.getProxy(RequestHedgingProxyProvider.java:173)
    at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenBothFail(TestRequestHedgingProxyProvider.java:117)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneIsSlow

Error Message:
org/apache/hadoop/io/retry/MultiException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/retry/MultiException
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.server.namenode.ha.RequestHedgingProxyProvider.getProxy(RequestHedgingProxyProvider.java:173)
    at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneIsSlow(TestRequestHedgingProxyProvider.java:98)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testPerformFailoverWith3Proxies

Error Message:
org/apache/hadoop/io/retry/MultiException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/retry/MultiException
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.server.namenode.ha.RequestHedgingProxyProvider.getProxy(RequestHedgingProxyProvider.java:173)
    at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testPerformFailoverWith3Proxies(TestRequestHedgingProxyProvider.java:264)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testPerformFailover

Error Message:
org/apache/hadoop/io/retry/MultiException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/io/retry/MultiException
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.hdfs.server.namenode.ha.RequestHedgingProxyProvider.getProxy(RequestHedgingProxyProvider.java:173)
    at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testPerformFailover(TestRequestHedgingProxyProvider.java:157)


REGRESSION:  org.apache.hadoop.security.TestPermission.testBackwardCompatibility

Error Message:
Unable to parse configuration fs.permissions.umask-mode with value 0022 as octal or symbolic umask.

Stack Trace:
java.lang.IllegalArgumentException: Unable to parse configuration fs.permissions.umask-mode with value 0022 as octal or symbolic umask.
    at org.apache.hadoop.fs.permission.FsPermission.getUMask(FsPermission.java:264)
    at org.apache.hadoop.security.TestPermission.testBackwardCompatibility(TestPermission.java:108)


FAILED:  org.apache.hadoop.tracing.TestTracing.org.apache.hadoop.tracing.TestTracing

Error Message:
org/apache/hadoop/tracing/SetSpanReceiver

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/tracing/SetSpanReceiver
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.tracing.TestTracing.setup(TestTracing.java:192)


REGRESSION:  org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead.testShortCircuitTraceHooks

Error Message:
org/apache/hadoop/tracing/SetSpanReceiver

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/tracing/SetSpanReceiver
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead.testShortCircuitTraceHooks(TestTracingShortCircuitLocalRead.java:67)
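
A note on the two FileContext setup failures above (TestWebHdfsFileContextMainOperations and TestSWebHdfsFileContextMainOperations): both report that no AbstractFileSystem is configured for their scheme. FileContext resolves a scheme through the fs.AbstractFileSystem.<scheme>.impl configuration key, so a minimal sketch of the kind of binding those tests appear to be missing might look like the following. This is an illustration, not code from this build; it assumes org.apache.hadoop.fs.WebHdfs and org.apache.hadoop.fs.SWebHdfs are on the classpath, and the host/port are placeholders:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileContext;

    // Hypothetical sketch: bind AbstractFileSystem implementations for the
    // webhdfs:// and swebhdfs:// schemes. Without a binding like this,
    // FileContext.getFileContext() fails with the UnsupportedFileSystemException
    // seen in the traces above.
    public class WebHdfsSchemeBinding {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.AbstractFileSystem.webhdfs.impl", "org.apache.hadoop.fs.WebHdfs");
        conf.set("fs.AbstractFileSystem.swebhdfs.impl", "org.apache.hadoop.fs.SWebHdfs");

        // With the keys set, FileContext can resolve the webhdfs scheme.
        FileContext fc = FileContext.getFileContext(URI.create("webhdfs://localhost:50070/"), conf);
        System.out.println("Resolved scheme: " + fc.getDefaultFileSystem().getUri());
      }
    }

The same keys can equally be set in core-site.xml; the programmatic form is shown only to keep the sketch self-contained.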