See <https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/15/changes>

Changes:

[vinod] Updated CHANGELOG.

------------------------------------------
[...truncated 7723 lines...]
[ivy:resolve]   found junit#junit;4.8.1 in reactor-repo
[ivy:resolve]   found org.mortbay.jetty#jetty-util;6.1.26.cloudera.2 in 
cdh-releases
[ivy:resolve]   found org.mortbay.jetty#jetty;6.1.26.cloudera.2 in cdh-releases
[ivy:resolve]   found org.mortbay.jetty#servlet-api;2.5-20081211 in default
[ivy:resolve]   found commons-httpclient#commons-httpclient;3.1 in default
[ivy:resolve]   found log4j#log4j;1.2.16 in default
[ivy:resolve]   found commons-codec#commons-codec;1.4 in default
[ivy:resolve]   found org.codehaus.jackson#jackson-mapper-asl;1.0.1 in default
[ivy:resolve]   found org.codehaus.jackson#jackson-core-asl;1.0.1 in default
[ivy:resolve] :: resolution report :: resolve 568ms :: artifacts dl 6ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   11  |   9   |   0   |   0   ||   11  |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#streaming [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 11 already retrieved (0kB/4ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivysettings.xml

compile:
     [echo] contrib: streaming
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/src/contrib/build-contrib.xml:193:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds
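
(This javac warning is harmless for the build. If one wanted to silence it, a hedged option — not something TUTORIAL.sh does — is to make Ant's classpath choice explicit instead of editing build-contrib.xml; the target name below is only assumed from the "package" target seen later in this log:

  # hedged: setting build.sysclasspath explicitly suppresses the
  # 'includeantruntime' warning; the exact ant target used by the
  # tutorial's Makefile may differ
  $ ant -Dbuild.sysclasspath=ignore package
)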

jar:
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/contrib/streaming/hadoop-streaming-2.0.0-mr1-cdh4.1.2.jar

compile-examples:

jar-examples:

package:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/streaming
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/streaming
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/streaming/lib
     [copy] Copying 5 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/streaming/lib

check-contrib:

init:
     [echo] contrib: thriftfs

init-contrib:

ivy-download:
      [get] Getting: 
http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.2.0/ivy-2.2.0.jar
      [get] To: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivy-2.2.0.jar
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivysettings.xml

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: 
org.apache.hadoop#thriftfs;working@hemera
[ivy:resolve]   confs: [common]
[ivy:resolve]   found commons-logging#commons-logging;1.1.1 in default
[ivy:resolve]   found log4j#log4j;1.2.16 in default
[ivy:resolve] :: resolution report :: resolve 15ms :: artifacts dl 1ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   2   |   2   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#thriftfs [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivysettings.xml

compile:
     [echo] contrib: thriftfs
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/src/contrib/build-contrib.xml:193:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds

jar:
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/contrib/thriftfs/hadoop-thriftfs-2.0.0-mr1-cdh4.1.2.jar

compile-examples:

jar-examples:

package:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/thriftfs
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/thriftfs
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/thriftfs/lib
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/thriftfs/lib

init:

ivy-download:
      [get] Getting: 
http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.2.0/ivy-2.2.0.jar
      [get] To: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivy-2.2.0.jar
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivysettings.xml

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: 
org.apache.hadoop#vaidya;working@hemera
[ivy:resolve]   confs: [common]
[ivy:resolve]   found commons-logging#commons-logging;1.1.1 in default
[ivy:resolve]   found log4j#log4j;1.2.16 in default
[ivy:resolve] :: resolution report :: resolve 12ms :: artifacts dl 0ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   2   |   2   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#vaidya [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/ivy/ivysettings.xml

compile:
     [echo] contrib: vaidya
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/src/contrib/build-contrib.xml:193:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds

jar:
     [echo] contrib: vaidya
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/contrib/vaidya/hadoop-vaidya-2.0.0-mr1-cdh4.1.2.jar

package:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/vaidya
     [copy] Copying 3 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/contrib/vaidya
     [copy] Copying 10 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/webapps
     [copy] Copying 5 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2
     [copy] Copying 9 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/bin
     [copy] Copying 4 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/.eclipse.templates
     [copy] Copying 17 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/conf
     [copy] Copying 17 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/example-confs
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2
     [copy] Copying 10 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/ivy
     [copy] Copying 4 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2
     [copy] Copying 6 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/c++
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/lib
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2
     [copy] Copying 1290 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/cloudera
     [copy] Copying 4 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop-2.0.0-mr1-cdh4.1.2/c++

BUILD SUCCESSFUL
Total time: 1 minute 45 seconds

To build the Mesos executor package, we first copy the
necessary Mesos libraries.


  $ cd build/hadoop-2.0.0-mr1-cdh4.1.2
  $ mkdir -p lib/native/Linux-amd64-64
  $ cp /x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/src/.libs/libmesos.so \
       lib/native/Linux-amd64-64
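
A quick sanity check (not part of the tutorial output) can confirm the library actually landed in the directory created above:

  # hedged check: the Mesos native library should now be in the packaged tree
  $ ls -lh lib/native/Linux-amd64-64/libmesos.so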



  We will remove Cloudera patches from the Mesos executor package
  to save space (~62MB).


  $ rm -rf cloudera
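
If one wanted to confirm the ~62MB figure first, a hedged check (run before the rm above) could be:

  # report the on-disk size of the Cloudera patch directory before deleting it
  $ du -sh cloudera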



  Finally, we will build the Mesos executor package as follows:


  $ cd ..
  $ mv hadoop-2.0.0-mr1-cdh4.1.2 hadoop
  $ tar czf hadoop.tar.gz hadoop
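
It can be worth verifying that the executor package really contains the copied Mesos library before handing it to Mesos; a small hedged check:

  # the packaged tree should include the libmesos.so copied earlier
  $ tar tzf hadoop.tar.gz | grep 'native/Linux-amd64-64/libmesos.so'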



Build success! Now let's run something!

Let's go ahead and try to start the JobTracker via:
  $ cd ..
  $ ./bin/hadoop jobtracker
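
Since the wordcount run below fails to reach the JobTracker, a couple of hedged checks after starting it (not part of the tutorial output) would be:

  # is the JobTracker JVM actually running?
  $ jps | grep JobTracker
  # is its RPC port (54311, per the client error further down) accepting connections?
  $ netstat -lnt | grep 54311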



JobTracker started at 1389.

Waiting 5 seconds for it to start. . . . . .
Alright, now let's run the "wordcount" example via:

  $ ./bin/hadoop jar hadoop-examples-2.0.0-mr1-cdh4.1.2.jar wordcount \
      src/contrib/mesos/src/java/org/apache/hadoop/mapred out


SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in 
[jar:file:/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/lib/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in 
[jar:file:/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-hadoop-2.0.0-mr1-cdh4.1.2/build/hadoop/hadoop-2.0.0-mr1-cdh4.1.2/build/ivy/lib/Hadoop/common/slf4j-log4j12-1.6.1.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
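
The SLF4J warning above is only noise, but if one wanted a clean classpath the usual remedy is to drop one of the two duplicate slf4j-log4j12 jars it lists; a hedged sketch, with the relative path taken from the second binding:

  # remove the duplicate binding pulled in by Ivy, keeping the one under lib/
  $ rm build/ivy/lib/Hadoop/common/slf4j-log4j12-1.6.1.jar
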
13/05/22 09:25:07 ERROR security.UserGroupInformation: 
PriviledgedActionException as:jenkins (auth:SIMPLE) cause:java.io.IOException: 
Failed on local exception: java.io.IOException: Response is null.; Host Details 
: local host is: "hemera/140.211.11.27"; destination host is: 
"localhost":54311; 
java.io.IOException: Failed on local exception: java.io.IOException: Response 
is null.; Host Details : local host is: "hemera/140.211.11.27"; destination 
host is: "localhost":54311; 
        at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:759)
        at org.apache.hadoop.ipc.Client.call(Client.java:1164)
        at 
org.apache.hadoop.ipc.WritableRpcEngine$Invoker.invoke(WritableRpcEngine.java:225)
        at org.apache.hadoop.mapred.$Proxy6.getStagingAreaDir(Unknown Source)
        at 
org.apache.hadoop.mapred.JobClient.getStagingAreaDir(JobClient.java:1275)
        at 
org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:102)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:902)
        at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:896)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1332)
        at 
org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:896)
        at org.apache.hadoop.mapreduce.Job.submit(Job.java:531)
        at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:561)
        at org.apache.hadoop.examples.WordCount.main(WordCount.java:67)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at 
org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:72)
        at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:144)
        at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:64)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:208)
Caused by: java.io.IOException: Response is null.
        at 
org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:885)
        at org.apache.hadoop.ipc.Client$Connection.run(Client.java:813)

Oh no, it failed! Try running the JobTracker and wordcount
example manually ... it might be an issue with your environment that
this tutorial didn't cover (if you find this to be the case, please
create a JIRA for us and/or send us a code review).
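
A hedged sketch of that manual run, reusing the commands from this log plus a couple of checks aimed at the "Response is null" / "localhost":54311 error above:

  # 1. check which host:port the client expects the JobTracker on
  $ grep -A1 mapred.job.tracker conf/mapred-site.xml
  # 2. start the JobTracker in the foreground so startup errors are visible
  $ ./bin/hadoop jobtracker
  # 3. in a second shell (same directory), confirm the port, then resubmit the job
  $ netstat -lnt | grep 54311
  $ ./bin/hadoop jar hadoop-examples-2.0.0-mr1-cdh4.1.2.jar wordcount \
      src/contrib/mesos/src/java/org/apache/hadoop/mapred out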

./TUTORIAL.sh: line 662: kill: (1389) - No such process
make: *** [hadoop-2.0.0-mr1-cdh4.1.2] Error 1
Build step 'Execute shell' marked build as failure
