See 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/12/changes>

Changes:

[vinod] Upgraded libev to 4.15.

[vinod] Fixed python setup.py to use libev-4.15.

------------------------------------------
[...truncated 7723 lines...]
[ivy:resolve]   found commons-lang#commons-lang;2.4 in maven2
[ivy:resolve]   found commons-logging#commons-logging;1.1.1 in maven2
[ivy:resolve]   found commons-digester#commons-digester;1.8 in maven2
[ivy:resolve]   found commons-beanutils#commons-beanutils;1.7.0 in maven2
[ivy:resolve]   found commons-beanutils#commons-beanutils-core;1.8.0 in maven2
[ivy:resolve]   found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] :: resolution report :: resolve 90ms :: artifacts dl 6ms
[ivy:resolve]   :: evicted modules:
[ivy:resolve]   commons-logging#commons-logging;1.0.4 by 
[commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve]   commons-logging#commons-logging;1.0.3 by 
[commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve]   commons-logging#commons-logging;1.1 by 
[commons-logging#commons-logging;1.1.1] in [common]
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   25  |   0   |   0   |   3   ||   22  |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#streaming [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 22 already retrieved (0kB/3ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivysettings.xml

compile:
     [echo] contrib: streaming
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/src/contrib/build-contrib.xml:185:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds

jar:
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/contrib/streaming/hadoop-streaming-0.20.205.0.jar

compile-examples:

jar-examples:

package:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/contrib/streaming
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/contrib/streaming

check-contrib:

init:
     [echo] contrib: thriftfs

init-contrib:

ivy-download:
      [get] Getting: 
http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivy-2.1.0.jar
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivysettings.xml

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: 
org.apache.hadoop#thriftfs;working@hemera
[ivy:resolve]   confs: [common]
[ivy:resolve]   found commons-logging#commons-logging;1.0.4 in maven2
[ivy:resolve]   found log4j#log4j;1.2.15 in maven2
[ivy:resolve]   found commons-configuration#commons-configuration;1.6 in maven2
[ivy:resolve]   found commons-collections#commons-collections;3.2.1 in maven2
[ivy:resolve]   found commons-lang#commons-lang;2.4 in maven2
[ivy:resolve]   found commons-logging#commons-logging;1.1.1 in maven2
[ivy:resolve]   found commons-digester#commons-digester;1.8 in maven2
[ivy:resolve]   found commons-beanutils#commons-beanutils;1.7.0 in maven2
[ivy:resolve]   found commons-beanutils#commons-beanutils-core;1.8.0 in maven2
[ivy:resolve]   found org.apache.commons#commons-math;2.1 in maven2
[ivy:resolve] :: resolution report :: resolve 40ms :: artifacts dl 2ms
[ivy:resolve]   :: evicted modules:
[ivy:resolve]   commons-logging#commons-logging;1.0.4 by 
[commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve]   commons-logging#commons-logging;1.0.3 by 
[commons-logging#commons-logging;1.1.1] in [common]
[ivy:resolve]   commons-logging#commons-logging;1.1 by 
[commons-logging#commons-logging;1.1.1] in [common]
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   12  |   0   |   0   |   3   ||   9   |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#thriftfs [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 9 already retrieved (0kB/2ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivysettings.xml

compile:
     [echo] contrib: thriftfs
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/src/contrib/build-contrib.xml:185:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds

jar:
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/contrib/thriftfs/hadoop-thriftfs-0.20.205.0.jar

compile-examples:

jar-examples:

package:
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/lib

init:

ivy-download:
      [get] Getting: 
http://repo2.maven.org/maven2/org/apache/ivy/ivy/2.1.0/ivy-2.1.0.jar
      [get] To: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivy-2.1.0.jar
      [get] Not modified - so not downloaded

ivy-probe-antlib:

ivy-init-antlib:

ivy-init:
[ivy:configure] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivysettings.xml

ivy-resolve-common:
[ivy:resolve] :: resolving dependencies :: 
org.apache.hadoop#vaidya;working@hemera
[ivy:resolve]   confs: [common]
[ivy:resolve]   found commons-logging#commons-logging;1.0.4 in maven2
[ivy:resolve]   found log4j#log4j;1.2.15 in maven2
[ivy:resolve] :: resolution report :: resolve 10ms :: artifacts dl 0ms
        ---------------------------------------------------------------------
        |                  |            modules            ||   artifacts   |
        |       conf       | number| search|dwnlded|evicted|| number|dwnlded|
        ---------------------------------------------------------------------
        |      common      |   2   |   0   |   0   |   0   ||   2   |   0   |
        ---------------------------------------------------------------------

ivy-retrieve-common:
[ivy:retrieve] :: retrieving :: org.apache.hadoop#vaidya [sync]
[ivy:retrieve]  confs: [common]
[ivy:retrieve]  0 artifacts copied, 2 already retrieved (0kB/1ms)
[ivy:cachepath] DEPRECATED: 'ivy.conf.file' is deprecated, use 
'ivy.settings.file' instead
[ivy:cachepath] :: loading settings :: file = 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/ivy/ivysettings.xml

compile:
     [echo] contrib: vaidya
    [javac] 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/src/contrib/build-contrib.xml:185:
 warning: 'includeantruntime' was not set, defaulting to 
build.sysclasspath=last; set to false for repeatable builds

jar:
     [echo] contrib: vaidya
      [jar] Building jar: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/contrib/vaidya/hadoop-vaidya-0.20.205.0.jar

package:
    [mkdir] Created dir: 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/contrib/vaidya
     [copy] Copying 3 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/contrib/vaidya
     [copy] Copying 35 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/webapps
     [copy] Copied 13 empty directories to 2 empty directories under 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop/webapps
     [copy] Copying 5 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/hadoop
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/bin
     [copy] Copying 16 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/sbin
     [copy] Copying 1 file to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/libexec
     [copy] Copying 16 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/etc/hadoop
     [copy] Copying 4 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/share/doc/hadoop
     [copy] Copying 7 files to 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/hadoop/hadoop-0.20.205.0/build/hadoop-0.20.205.0/sbin

BUILD SUCCESSFUL
Total time: 47 seconds

To build the Mesos executor package, we first copy the
necessary Mesos libraries.


  $ cd build/hadoop-0.20.205.0
  $ mkdir -p lib/native/Linux-amd64-64
  $ cp 
/x1/jenkins/jenkins-slave/workspace/Mesos-Trunk-Ubuntu-Hadoop-0.20.205.0/build/src/.libs/libmesos.so
 lib/native/Linux-amd64-64



  Finally, we will build the Mesos executor package as follows:


  $ cd ..
  $ mv hadoop-0.20.205.0 hadoop
  $ tar czf hadoop.tar.gz hadoop



Build success! Now let's run something!

Let's go ahead and try to start the JobTracker via:
  $ cd ..
  $ ./bin/hadoop jobtracker



JobTracker started at 14205.

Waiting 5 seconds for it to start. . . . . .
Alright, now let's run the "wordcount" example via:

  $ ./bin/hadoop jar hadoop-examples-0.20.205.0.jar wordcount   
src/contrib/mesos/src/java/org/apache/hadoop/mapred out


13/05/21 23:59:30 WARN util.NativeCodeLoader: Unable to load native-hadoop 
library for your platform... using builtin-java classes where applicable
13/05/21 23:59:30 INFO input.FileInputFormat: Total input paths to process : 2
13/05/21 23:59:30 INFO mapred.JobClient: Running job: job_201305212314_0002
13/05/21 23:59:31 INFO mapred.JobClient:  map 0% reduce 0%
java.io.IOException: Call to localhost/127.0.0.1:54311 failed on local 
exception: java.io.IOException: Connection reset by peer
        at org.apache.hadoop.ipc.Client.wrapException(Client.java:1103)
        at org.apache.hadoop.ipc.Client.call(Client.java:1071)
        at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:225)
        at org.apache.hadoop.mapred.$Proxy1.getTaskCompletionEvents(Unknown 
Source)
        at 
org.apache.hadoop.mapred.JobClient$NetworkedJob.getTaskCompletionEvents(JobClient.java:385)
        at 
org.apache.hadoop.mapred.JobClient.monitorAndPrintJob(JobClient.java:1297)
        at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:498)
        at org.apache.hadoop.examples.WordCount.main(WordCount.java:67)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at 
org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:68)
        at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:139)
        at org.apache.hadoop.examples.ExampleDriver.main(ExampleDriver.java:64)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at org.apache.hadoop.util.RunJar.main(RunJar.java:156)
Caused by: java.io.IOException: Connection reset by peer
        at sun.nio.ch.FileDispatcher.read0(Native Method)
        at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
        at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:233)
        at sun.nio.ch.IOUtil.read(IOUtil.java:206)
        at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:236)
        at 
org.apache.hadoop.net.SocketInputStream$Reader.performIO(SocketInputStream.java:55)
        at 
org.apache.hadoop.net.SocketIOWithTimeout.doIO(SocketIOWithTimeout.java:142)
        at 
org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:155)
        at 
org.apache.hadoop.net.SocketInputStream.read(SocketInputStream.java:128)
        at java.io.FilterInputStream.read(FilterInputStream.java:116)
        at 
org.apache.hadoop.ipc.Client$Connection$PingInputStream.read(Client.java:342)
        at java.io.BufferedInputStream.fill(BufferedInputStream.java:218)
        at java.io.BufferedInputStream.read(BufferedInputStream.java:237)
        at java.io.DataInputStream.readInt(DataInputStream.java:370)
        at 
org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:800)
        at org.apache.hadoop.ipc.Client$Connection.run(Client.java:745)

Oh no, it failed! Try running the JobTracker and wordcount
example manually ... it might be an issue with your environment that
this tutorial didn't cover (if you find this to be the case, please
create a JIRA for us and/or send us a code review).

./TUTORIAL.sh: line 662: kill: (14205) - No such process
make: *** [hadoop-0.20.205.0] Error 1
Build step 'Execute shell' marked build as failure

Reply via email to