See 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/3/>

------------------------------------------
[...truncated 12087 lines...]
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/MachineNode.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/MapTaskAttemptInfo.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/Node.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/Pair.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ParsedConfigFile.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ParsedHost.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ParsedLine.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/Pre21JobHistoryConstants.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/RackNode.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ReduceTaskAttemptInfo.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/TaskAttemptInfo.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/TaskInfo.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/TreePath.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ZombieCluster.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ZombieJob.java
hadoop-0.20.2-cdh3u3/src/tools/org/apache/hadoop/tools/rumen/ZombieJobProducer.java
hadoop-0.20.2-cdh3u3/src/webapps/datanode/browseBlock.jsp
hadoop-0.20.2-cdh3u3/src/webapps/datanode/browseDirectory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/datanode/tail.jsp
hadoop-0.20.2-cdh3u3/src/webapps/hdfs/dfshealth.jsp
hadoop-0.20.2-cdh3u3/src/webapps/hdfs/dfsnodelist.jsp
hadoop-0.20.2-cdh3u3/src/webapps/hdfs/index.html
hadoop-0.20.2-cdh3u3/src/webapps/hdfs/nn_browsedfscontent.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/analysejobhistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/index.html
hadoop-0.20.2-cdh3u3/src/webapps/job/job_authorization_error.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobblacklistedtrackers.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobconf.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobconf_history.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobdetails.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobdetailshistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobfailures.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobhistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobqueue_details.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobtasks.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobtaskshistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobtracker.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/jobtracker.jspx
hadoop-0.20.2-cdh3u3/src/webapps/job/loadhistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/machines.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/taskdetails.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/taskdetailshistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/taskstats.jsp
hadoop-0.20.2-cdh3u3/src/webapps/job/taskstatshistory.jsp
hadoop-0.20.2-cdh3u3/src/webapps/secondary/index.html
hadoop-0.20.2-cdh3u3/src/webapps/secondary/status.jsp
hadoop-0.20.2-cdh3u3/src/webapps/static/hadoop-logo.jpg
hadoop-0.20.2-cdh3u3/src/webapps/static/hadoop.css
hadoop-0.20.2-cdh3u3/src/webapps/static/images/favicon.ico
hadoop-0.20.2-cdh3u3/src/webapps/static/jobconf.xsl
hadoop-0.20.2-cdh3u3/src/webapps/static/jobtracker.js
hadoop-0.20.2-cdh3u3/src/webapps/static/sorttable.js
hadoop-0.20.2-cdh3u3/src/webapps/task/index.html
hadoop-0.20.2-cdh3u3/src/webapps/task/tasktracker.jsp
hadoop-0.20.2-cdh3u3/webapps/datanode/WEB-INF/web.xml
hadoop-0.20.2-cdh3u3/webapps/hdfs/WEB-INF/web.xml
hadoop-0.20.2-cdh3u3/webapps/hdfs/index.html
hadoop-0.20.2-cdh3u3/webapps/job/WEB-INF/web.xml
hadoop-0.20.2-cdh3u3/webapps/job/index.html
hadoop-0.20.2-cdh3u3/webapps/secondary/WEB-INF/web.xml
hadoop-0.20.2-cdh3u3/webapps/secondary/index.html
hadoop-0.20.2-cdh3u3/webapps/static/hadoop-logo.jpg
hadoop-0.20.2-cdh3u3/webapps/static/hadoop.css
hadoop-0.20.2-cdh3u3/webapps/static/images/favicon.ico
hadoop-0.20.2-cdh3u3/webapps/static/jobconf.xsl
hadoop-0.20.2-cdh3u3/webapps/static/jobtracker.js
hadoop-0.20.2-cdh3u3/webapps/static/sorttable.js
hadoop-0.20.2-cdh3u3/webapps/task/WEB-INF/web.xml
hadoop-0.20.2-cdh3u3/webapps/task/index.html
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/image/
hadoop-0.20.2-cdh3u3/bin/hadoop
hadoop-0.20.2-cdh3u3/bin/hadoop-config.sh
hadoop-0.20.2-cdh3u3/bin/hadoop-daemon.sh
hadoop-0.20.2-cdh3u3/bin/hadoop-daemons.sh
hadoop-0.20.2-cdh3u3/bin/rcc
hadoop-0.20.2-cdh3u3/bin/slaves.sh
hadoop-0.20.2-cdh3u3/bin/start-all.sh
hadoop-0.20.2-cdh3u3/bin/start-balancer.sh
hadoop-0.20.2-cdh3u3/bin/start-dfs.sh
hadoop-0.20.2-cdh3u3/bin/start-mapred.sh
hadoop-0.20.2-cdh3u3/bin/stop-all.sh
hadoop-0.20.2-cdh3u3/bin/stop-balancer.sh
hadoop-0.20.2-cdh3u3/bin/stop-dfs.sh
hadoop-0.20.2-cdh3u3/bin/stop-mapred.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/hdfsproxy
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/hdfsproxy-config.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/hdfsproxy-daemon.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/hdfsproxy-daemons.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/hdfsproxy-slaves.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/start-hdfsproxy.sh
hadoop-0.20.2-cdh3u3/contrib/hdfsproxy/bin/stop-hdfsproxy.sh
hadoop-0.20.2-cdh3u3/contrib/hod/bin/VERSION
hadoop-0.20.2-cdh3u3/contrib/hod/bin/checknodes
hadoop-0.20.2-cdh3u3/contrib/hod/bin/hod
hadoop-0.20.2-cdh3u3/contrib/hod/bin/hodcleanup
hadoop-0.20.2-cdh3u3/contrib/hod/bin/hodring
hadoop-0.20.2-cdh3u3/contrib/hod/bin/ringmaster
hadoop-0.20.2-cdh3u3/contrib/hod/bin/verify-account
hadoop-0.20.2-cdh3u3/contrib/vaidya/bin/vaidya.sh
hadoop-0.20.2-cdh3u3/src/c++/libhdfs/configure
hadoop-0.20.2-cdh3u3/src/c++/pipes/configure
hadoop-0.20.2-cdh3u3/src/c++/task-controller/configure
hadoop-0.20.2-cdh3u3/src/c++/utils/configure
hadoop-0.20.2-cdh3u3/src/contrib/cloud/src/py/hadoop-ec2
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/cmd-hadoop-cluster
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/create-hadoop-image
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/delete-hadoop-cluster
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/hadoop-ec2
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/hadoop-ec2-env.sh
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/hadoop-ec2-init-remote.sh
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/image/create-hadoop-image-remote
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/image/ec2-run-user-data
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/launch-hadoop-cluster
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/launch-hadoop-master
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/launch-hadoop-slaves
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/list-hadoop-clusters
hadoop-0.20.2-cdh3u3/src/contrib/ec2/bin/terminate-hadoop-cluster
hadoop-0.20.2-cdh3u3/src/examples/pipes/configure

Okay, now let's change into the hadoop-0.20.2-cdh3u3 directory in order to apply
some patches, copy in the Mesos specific code, and build everything.

  $ cd hadoop-0.20.2-cdh3u3



To run Hadoop on Mesos we need to apply a rather minor patch. The
patch makes a small number of modifications in Hadoop. (Note that the
changes to Hadoop have been committed in revisions r1033804 and
r987589 so at some point we won't need to apply any patch at all.)
We'll apply the patch with:

  $ patch -p1 <../hadoop-0.20.2-cdh3u3.patch


patching file src/mapred/org/apache/hadoop/mapred/JobInProgress.java
patching file src/mapred/org/apache/hadoop/mapred/Task.java
patching file src/mapred/org/apache/hadoop/mapred/TaskRunner.java
patching file src/mapred/org/apache/hadoop/mapred/TaskTracker.java
Hunk #4 succeeded at 2036 (offset 22 lines).
Hunk #5 succeeded at 2204 (offset 22 lines).
Hunk #6 succeeded at 2313 (offset 22 lines).
Hunk #7 succeeded at 2979 (offset 22 lines).
Hunk #8 succeeded at 3186 (offset 22 lines).
Hunk #9 succeeded at 3327 (offset 22 lines).
Hunk #10 succeeded at 3362 (offset 22 lines).
Hunk #11 succeeded at 3548 (offset 22 lines).
patching file 
src/mapred/org/apache/hadoop/mapred/TaskTrackerInstrumentation.java

Now we'll copy over the Mesos contrib components. In addition, we'll
need to edit ivy/libraries.properties and src/contrib/build.xml to
hook the Mesos contrib component into the build. We've included a
patch to do that for you:

  $ cp -r ../mesos src/contrib
  $ cp -p ../mesos-executor bin
  $ patch -p1 <../hadoop-0.20.2-cdh3u3_mesos.patch


patching file ivy/libraries.properties
patching file src/contrib/build.xml

Okay, now we're ready to build and then run Hadoop! There are a couple
important considerations. First, we need to locate the Mesos JAR and
native library (i.e., libmesos.so on Linux and libmesos.dylib on Mac
OS X). The Mesos JAR is used for both building and running, while the
native library is only used for running. In addition, we need to
locate the Protobuf JAR (if you don't already have one on your
default classpath).

This tutorial assumes you've built Mesos already. We'll use the
environment variable MESOS_BUILD_DIR to denote this directory.



Using 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build>
 as the build directory.


Now we'll copy over the necessary libraries we need from the build
directory.

  $ cp 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build/protobuf-2.4.1.jar>
 lib
  $ cp 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build/src/mesos-0.10.0.jar>
 lib
  $ mkdir -p lib/native/Linux-amd64-64
  $ cp 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build/src/.libs/libmesos.so>
 lib/native/Linux-amd64-64


Okay, let's try building Hadoop and the Mesos contrib classes:

  $ ant


Buildfile: 
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build/hadoop/hadoop-0.20.2-cdh3u3/build.xml>

BUILD FAILED
<https://builds.apache.org/job/Mesos-Trunk-Ubuntu-Hadoop-hadoop-0.20.2-cdh3u3/ws/build/hadoop/hadoop-0.20.2-cdh3u3/build.xml>:42:
 Execute failed: java.io.IOException: Cannot run program "mvn": 
java.io.IOException: error=2, No such file or directory
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:460)
        at java.lang.Runtime.exec(Runtime.java:593)
        at 
org.apache.tools.ant.taskdefs.Execute$Java13CommandLauncher.exec(Execute.java:862)
        at org.apache.tools.ant.taskdefs.Execute.launch(Execute.java:481)
        at org.apache.tools.ant.taskdefs.Execute.execute(Execute.java:495)
        at org.apache.tools.ant.taskdefs.ExecTask.runExecute(ExecTask.java:631)
        at org.apache.tools.ant.taskdefs.ExecTask.runExec(ExecTask.java:672)
        at org.apache.tools.ant.taskdefs.ExecTask.execute(ExecTask.java:498)
        at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:291)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at 
org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
        at org.apache.tools.ant.Task.perform(Task.java:348)
        at org.apache.tools.ant.taskdefs.Sequential.execute(Sequential.java:68)
        at net.sf.antcontrib.logic.IfTask.execute(IfTask.java:197)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at 
org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
        at org.apache.tools.ant.TaskAdapter.execute(TaskAdapter.java:154)
        at org.apache.tools.ant.UnknownElement.execute(UnknownElement.java:291)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
        at java.lang.reflect.Method.invoke(Method.java:597)
        at 
org.apache.tools.ant.dispatch.DispatchUtils.execute(DispatchUtils.java:106)
        at org.apache.tools.ant.Task.perform(Task.java:348)
        at org.apache.tools.ant.Target.execute(Target.java:390)
        at 
org.apache.tools.ant.helper.ProjectHelper2.parse(ProjectHelper2.java:180)
        at 
org.apache.tools.ant.ProjectHelper.configureProject(ProjectHelper.java:82)
        at org.apache.tools.ant.Main.runBuild(Main.java:793)
        at org.apache.tools.ant.Main.startAnt(Main.java:217)
        at org.apache.tools.ant.launch.Launcher.run(Launcher.java:280)
        at org.apache.tools.ant.launch.Launcher.main(Launcher.java:109)
Caused by: java.io.IOException: java.io.IOException: error=2, No such file or 
directory
        at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
        at java.lang.ProcessImpl.start(ProcessImpl.java:65)
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:453)
        ... 36 more

Total time: 7 seconds

Oh no! We failed to run 'ant'. If you need help try emailing:

  [email protected]

(Remember to include as much debug information as possible.)

make: *** [hadoop-0.20.2-cdh3u3] Error 1
Build step 'Execute shell' marked build as failure

Reply via email to