See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/1410/changes>

Changes:

[suresh] Update hadoop-common CHANGES.txt after merging HADOOP-8562

[vinodkv] MAPREDUCE-5270. Migrated MR app from using BuilderUtil factory methods to individual record factory methods. Contributed by Jian He.

------------------------------------------
[...truncated 14268 lines...]
        at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:115)
        at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:75)

testOperationDoAs[4](org.apache.hadoop.fs.http.client.TestHttpFSWithHttpFSFileSystem)  Time elapsed: 380 sec  <<< ERROR!
org.apache.hadoop.ipc.RemoteException(java.io.IOException): Specified block size is less than configured minimum value (dfs.namenode.fs-limits.min-block-size): 1024 < 1048576
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:1849)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:1818)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:459)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:300)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java:47995)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:527)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1033)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1842)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1838)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1489)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1836)

        at org.apache.hadoop.ipc.Client.call(Client.java:1303)
        at org.apache.hadoop.ipc.Client.call(Client.java:1255)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:204)
        at $Proxy19.create(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:163)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:82)
        at $Proxy19.create(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:227)
        at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1375)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1285)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1210)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:298)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:266)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:83)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:888)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:869)
        at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:233)
        at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:219)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.testConcat(BaseTestHttpFSWith.java:220)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.operation(BaseTestHttpFSWith.java:498)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.access$100(BaseTestHttpFSWith.java:62)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith$1.run(BaseTestHttpFSWith.java:572)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith$1.run(BaseTestHttpFSWith.java:569)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1489)
        at org.apache.hadoop.fs.http.client.BaseTestHttpFSWith.testOperationDoAs(BaseTestHttpFSWith.java:569)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
        at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
        at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
        at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
        at org.apache.hadoop.test.TestHdfsHelper$HdfsStatement.evaluate(TestHdfsHelper.java:74)
        at org.apache.hadoop.test.TestDirHelper$1.evaluate(TestDirHelper.java:106)
        at org.apache.hadoop.test.TestDirHelper$1.evaluate(TestDirHelper.java:106)
        at org.apache.hadoop.test.TestJettyHelper$1.evaluate(TestJettyHelper.java:53)
        at org.apache.hadoop.test.TestExceptionHelper$1.evaluate(TestExceptionHelper.java:42)
        at org.junit.runners.BlockJUnit4ClassRunner.runNotIgnored(BlockJUnit4ClassRunner.java:79)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:71)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:49)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
        at org.junit.runners.Suite.runChild(Suite.java:128)
        at org.junit.runners.Suite.runChild(Suite.java:24)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:236)
        at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:252)
        at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:141)
        at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:112)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.maven.surefire.util.ReflectionUtils.invokeMethodWithArray(ReflectionUtils.java:189)
        at org.apache.maven.surefire.booter.ProviderFactory$ProviderProxy.invoke(ProviderFactory.java:165)
        at org.apache.maven.surefire.booter.ProviderFactory.invokeProvider(ProviderFactory.java:85)
        at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:115)
        at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:75)

Running org.apache.hadoop.fs.http.client.TestHttpFSFileSystemLocalFileSystem
Tests run: 32, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.575 sec

Results :

Tests in error: 
  testOperation[4](org.apache.hadoop.fs.http.client.TestHttpFSFWithWebhdfsFileSystem): Specified block size is less than configured minimum value (dfs.namenode.fs-limits.min-block-size): 1024 < 1048576(..)
  testOperationDoAs[4](org.apache.hadoop.fs.http.client.TestHttpFSFWithWebhdfsFileSystem): Specified block size is less than configured minimum value (dfs.namenode.fs-limits.min-block-size): 1024 < 1048576(..)
  testOperation[4](org.apache.hadoop.fs.http.client.TestHttpFSWithHttpFSFileSystem): Specified block size is less than configured minimum value (dfs.namenode.fs-limits.min-block-size): 1024 < 1048576(..)
  testOperationDoAs[4](org.apache.hadoop.fs.http.client.TestHttpFSWithHttpFSFileSystem): Specified block size is less than configured minimum value (dfs.namenode.fs-limits.min-block-size): 1024 < 1048576(..)

Tests run: 286, Failures: 0, Errors: 4, Skipped: 0
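
All four errors are the same NameNode limit check: the tests create files with a 1024-byte block size, but dfs.namenode.fs-limits.min-block-size defaults to 1048576 (1 MB), so FSNamesystem.startFileInt rejects the create(). A minimal sketch of the interaction, assuming a MiniDFSCluster-based setup (the class name, path, and config values below are illustrative, taken from the log, and are not the actual test code or fix):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MinBlockSizeSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Lower the limit the NameNode enforces in the failures above;
        // without this line, any create() with blockSize < 1048576 fails with
        // "Specified block size is less than configured minimum value".
        conf.setLong("dfs.namenode.fs-limits.min-block-size", 1024);
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
        try {
          FileSystem fs = cluster.getFileSystem();
          // create(path, overwrite, bufferSize, replication, blockSize)
          fs.create(new Path("/tiny-block-file"), true, 4096, (short) 1, 1024L).close();
        } finally {
          cluster.shutdown();
        }
      }
    }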

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS BookKeeper Journal 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[WARNING] The POM for org.eclipse.m2e:lifecycle-mapping:jar:1.0.0 is missing, no dependency information available
[WARNING] Failed to retrieve plugin descriptor for org.eclipse.m2e:lifecycle-mapping:1.0.0: Plugin org.eclipse.m2e:lifecycle-mapping:1.0.0 or one of its dependencies could not be resolved: Failed to read artifact descriptor for org.eclipse.m2e:lifecycle-mapping:jar:1.0.0
[INFO] 
[INFO] --- maven-clean-plugin:2.4.1:clean (default-clean) @ hadoop-hdfs-bkjournal ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-testdirs) @ hadoop-hdfs-bkjournal ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- hadoop-maven-plugins:3.0.0-SNAPSHOT:protoc (compile-protoc) @ hadoop-hdfs-bkjournal ---
[INFO] 
[INFO] --- maven-resources-plugin:2.2:resources (default-resources) @ hadoop-hdfs-bkjournal ---
[INFO] Using default encoding to copy filtered resources.
[INFO] 
[INFO] --- maven-compiler-plugin:2.5.1:compile (default-compile) @ hadoop-hdfs-bkjournal ---
[INFO] Compiling 7 source files to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target/classes>
[INFO] 
[INFO] --- maven-resources-plugin:2.2:testResources (default-testResources) @ hadoop-hdfs-bkjournal ---
[INFO] Using default encoding to copy filtered resources.
[INFO] 
[INFO] --- maven-compiler-plugin:2.5.1:testCompile (default-testCompile) @ hadoop-hdfs-bkjournal ---
[INFO] Compiling 8 source files to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target/test-classes>
[INFO] 
[INFO] --- maven-surefire-plugin:2.12.3:test (default-test) @ hadoop-hdfs-bkjournal ---
[INFO] Surefire report directory: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target/surefire-reports>

-------------------------------------------------------
 T E S T S
-------------------------------------------------------
Running org.apache.hadoop.contrib.bkjournal.TestBookKeeperAsHASharedDir
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.756 sec
Running org.apache.hadoop.contrib.bkjournal.TestBookKeeperEditLogStreams
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.678 sec
Running org.apache.hadoop.contrib.bkjournal.TestBookKeeperHACheckpoints
Tests run: 5, Failures: 1, Errors: 0, Skipped: 0, Time elapsed: 28.038 sec <<< FAILURE!
testStandbyExceptionThrownDuringCheckpoint(org.apache.hadoop.contrib.bkjournal.TestBookKeeperHACheckpoints)  Time elapsed: 9970 sec  <<< FAILURE!
java.lang.AssertionError: SBN should have still been checkpointing.
        at org.junit.Assert.fail(Assert.java:91)
        at org.junit.Assert.assertTrue(Assert.java:43)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyCheckpoints.testStandbyExceptionThrownDuringCheckpoint(TestStandbyCheckpoints.java:279)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44)
        at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
        at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41)
        at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
        at org.junit.internal.runners.statements.FailOnTimeout$1.run(FailOnTimeout.java:28)

Running org.apache.hadoop.contrib.bkjournal.TestCurrentInprogress
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.689 sec
Running org.apache.hadoop.contrib.bkjournal.TestBookKeeperConfiguration
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.09 sec
Running org.apache.hadoop.contrib.bkjournal.TestBookKeeperJournalManager
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.497 sec

Results :

Failed tests:   testStandbyExceptionThrownDuringCheckpoint(org.apache.hadoop.contrib.bkjournal.TestBookKeeperHACheckpoints): SBN should have still been checkpointing.

Tests run: 32, Failures: 1, Errors: 0, Skipped: 0
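
This failure is not BookKeeper-specific: TestBookKeeperHACheckpoints inherits testStandbyExceptionThrownDuringCheckpoint from TestStandbyCheckpoints (line 279 in the trace above), which asserts at a single instant that the standby NameNode is still mid-checkpoint, so it is sensitive to timing on a loaded CI host. One common way to harden such checks is to wait until the condition is actually observable rather than sampling it once, e.g. with Hadoop's GenericTestUtils.waitFor. A sketch only; the checkpointing flag below is a hypothetical stand-in for whatever state the real test inspects:

    import java.util.concurrent.TimeoutException;

    import com.google.common.base.Supplier;
    import org.apache.hadoop.test.GenericTestUtils;

    public class WaitForCheckpointSketch {
      // Hypothetical probe; the real test reads NameNode checkpoint state.
      static volatile boolean checkpointing = false;

      static void waitUntilCheckpointing() throws TimeoutException, InterruptedException {
        // Poll every 100 ms, give up after 10 s, instead of asserting a
        // one-shot snapshot that can race the checkpointing thread.
        GenericTestUtils.waitFor(new Supplier<Boolean>() {
          @Override
          public Boolean get() {
            return checkpointing;
          }
        }, 100, 10000);
      }
    }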

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[WARNING] The POM for org.eclipse.m2e:lifecycle-mapping:jar:1.0.0 is missing, no dependency information available
[WARNING] Failed to retrieve plugin descriptor for org.eclipse.m2e:lifecycle-mapping:1.0.0: Plugin org.eclipse.m2e:lifecycle-mapping:1.0.0 or one of its dependencies could not be resolved: Failed to read artifact descriptor for org.eclipse.m2e:lifecycle-mapping:jar:1.0.0
[INFO] 
[INFO] --- maven-clean-plugin:2.4.1:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.6:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.0:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.0:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.6:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:2.3.2:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ****** FindBugsMojo execute *******
[INFO] canGenerate is false
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ SUCCESS [1:36:12.956s]
[INFO] Apache Hadoop HttpFS .............................. FAILURE [1:45.208s]
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. FAILURE [56.376s]
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [0.050s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:38:55.359s
[INFO] Finished at: Sat May 25 13:12:15 UTC 2013
[INFO] Final Memory: 51M/778M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.12.3:test (default-test) on project hadoop-hdfs-httpfs: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.12.3:test (default-test) on project hadoop-hdfs-bkjournal: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/trunk/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs-httpfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Updating MAPREDUCE-5270
Updating HADOOP-8562
