Author: gkesavan
Date: Mon Jun 1 15:26:07 2009
New Revision: 780681
URL: http://svn.apache.org/viewvc?rev=780681&view=rev
Log:
Fix the jdiff targets and clean up build.xml
Added:
hadoop/core/branches/HADOOP-4687/hdfs/lib/jdiff/
hadoop/core/branches/HADOOP-4687/hdfs/lib/jdiff/hadoop-hdfs_0.20.0.xml
Modified:
hadoop/core/branches/HADOOP-4687/hdfs/build.xml
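
This change touches the JDiff wiring that the API-compatibility targets consume. A rough sketch of how the fixed wiring would typically be exercised (the target names api-xml and api-report, the jdiff.home property, and the JDiff install path are assumptions based on contemporaneous Hadoop builds; they are not shown in this diff):

  ant -Djdiff.home=/opt/jdiff api-xml     # writes lib/jdiff/hadoop-hdfs_<version>.xml for the current tree
  ant -Djdiff.home=/opt/jdiff api-report  # compares the current API against the 0.20.0 baseline added above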
Modified: hadoop/core/branches/HADOOP-4687/hdfs/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/branches/HADOOP-4687/hdfs/build.xml?rev=780681&r1=780680&r2=780681&view=diff
==============================================================================
--- hadoop/core/branches/HADOOP-4687/hdfs/build.xml (original)
+++ hadoop/core/branches/HADOOP-4687/hdfs/build.xml Mon Jun 1 15:26:07 2009
@@ -31,7 +31,7 @@
<property name="final.name" value="${name}-${version}"/>
<property name="test.hdfs.final.name" value="${name}-test-${version}"/>
<property name="test.hdfswithmr.final.name"
value="${name}-hdsfwithmr-test-${version}"/>
- <property name="test.final.name" value="${name}-test-${version}"/>
+ <property name="ant.final.name" value="${name}-ant-${version}"/>
<property name="year" value="2009"/>
<property name="src.dir" value="${basedir}/src"/>
@@ -44,7 +44,6 @@
<property name="src.docs.cn" value="${basedir}/src/docs/cn"/>
<property name="changes.src" value="${docs.src}/changes"/>
- <property name="xercescroot" value=""/>
<property name="build.dir" value="${basedir}/build"/>
<property name="build.classes" value="${build.dir}/classes"/>
<property name="build.src" value="${build.dir}/src"/>
@@ -119,7 +118,7 @@
<property name="jdiff.xml.dir" value="${lib.dir}/jdiff"/>
<property name="jdiff.stable" value="0.20.0"/>
<property name="jdiff.stable.javadoc"
- value="http://hadoop.apache.org/core/docs/r${jdiff.stable}/api/"/>
+ value="http://hadoop.apache.org/hdfs/docs/r${jdiff.stable}/api/"/>
<property name="scratch.dir" value="${user.home}/tmp"/>
<property name="svn.cmd" value="svn"/>
@@ -141,14 +140,14 @@
<property name="common.ivy.lib.dir"
location="${build.ivy.lib.dir}/${ant.project.name}/common"/>
<property name="build.ivy.report.dir" location="${build.ivy.dir}/report" />
<property name="build.ivy.maven.dir" location="${build.ivy.dir}/maven" />
- <property name="build.ivy.maven.pom"
location="${build.ivy.maven.dir}/hadoop-core-${version}.pom" />
- <property name="build.ivy.maven.jar"
location="${build.ivy.maven.dir}/hadoop-core-${version}.jar" />
+ <property name="build.ivy.maven.pom"
location="${build.ivy.maven.dir}/hadoop-hdfs-${version}.pom" />
+ <property name="build.ivy.maven.jar"
location="${build.ivy.maven.dir}/hadoop-hdfs-${version}.jar" />
<!--this is the naming policy for artifacts we want pulled down-->
<property name="ivy.artifact.retrieve.pattern"
value="${ant.project.name}/[conf]/[artifact]-[revision].[ext]"/>
<!--this is how artifacts that get built are named-->
- <property name="ivy.publish.pattern" value="hadoop-[revision]-core.[ext]"/>
+ <property name="ivy.publish.pattern" value="hadoop-hdfs-[revision].[ext]"/>
<property name="hadoop-hdfs.jar" location="${build.dir}/${final.name}.jar" />
<!-- jdiff.home property set -->
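
These two property renames, together with the ivy.publish.pattern change just above, make every published artifact carry the hadoop-hdfs name rather than hadoop-core. A sketch of the expansions, with the same hypothetical 0.21.0-dev version (build.ivy.dir is assumed to resolve to build/ivy):

  build/ivy/maven/hadoop-hdfs-0.21.0-dev.pom
  build/ivy/maven/hadoop-hdfs-0.21.0-dev.jar
  hadoop-hdfs-0.21.0-dev.jar   <- ivy.publish.pattern "hadoop-hdfs-[revision].[ext]"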
@@ -186,7 +185,7 @@
<pathelement location="${build.examples}"/>
<pathelement location="${build.tools}"/>
<pathelement path="${clover.jar}"/>
- <path refid="ivy.test.classpath"/>
+ <path refid="ivy-test.classpath"/>
<fileset dir="${lib.dir}">
<include name="hadoop-core-test-${version}.jar" />
<exclude name="**/excluded/" />
@@ -200,6 +199,7 @@
<pathelement location="${lib.dir}/hadoop-mapred-test-${version}.jar" />
<pathelement location="${lib.dir}/hadoop-mapred-${version}.jar" />
<pathelement location="${lib.dir}/hadoop-mapred-tools-${version}.jar" />
+ <pathelement location="${lib.dir}/hadoop-mapred-examples-${version}.jar" />
</path>
<!-- the cluster test classpath: uses conf.dir for configuration -->
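
The ivy-test.classpath refid above only resolves because the ivy-retrieve-test target near the end of this diff renames the pathid it creates to match; the producer and consumer must agree:

  <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>  <!-- created by ivy-retrieve-test -->
  <path refid="ivy-test.classpath"/>                        <!-- consumed inside test.classpath -->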
@@ -210,9 +210,6 @@
<pathelement location="${build.dir}"/>
</path>
- <!-- properties dependent on the items defined above. -->
- <!--<available classname="${rat.reporting.classname}" classpathref="classpath" property="rat.present" value="true"/> -->
-
<!-- ====================================================== -->
<!-- Macro definitions -->
<!-- ====================================================== -->
@@ -244,7 +241,6 @@
<mkdir dir="${test.build.extraconf}"/>
<tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
<touch millis="0" file="${touch.temp.file}">
- <fileset dir="${conf.dir}" includes="**/*.template"/>
<fileset dir="${contrib.dir}" includes="**/*.template"/>
</touch>
<delete file="${touch.temp.file}"/>
@@ -259,96 +255,8 @@
<!-- ====================================================== -->
<!-- Compile the Java files -->
<!-- ====================================================== -->
- <target name="record-parser" depends="init" if="javacc.home">
- <javacc
- target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
- outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
- javacchome="${javacc.home}" />
- </target>
-
- <target name="compile-rcc-compiler" depends="init, record-parser">
- <javac
- encoding="${build.encoding}"
- srcdir="${hdfs.src.dir}"
- includes="org/apache/hadoop/record/compiler/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}"/>
- <classpath refid="classpath"/>
- </javac>
-
- <taskdef name="recordcc"
classname="org.apache.hadoop.record.compiler.ant.RccTask">
- <classpath refid="classpath" />
- </taskdef>
- </target>
-
- <target name="compile-core-classes" depends="init, compile-rcc-compiler">
- <taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
- <classpath refid="test.classpath"/>
- </taskdef>
- <!-- Compile Java files (excluding JSPs) checking warnings -->
- <javac
- encoding="${build.encoding}"
- srcdir="${core.src.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.classes}">
- <fileset dir="${core.src.dir}" includes="**/*.properties"/>
- <fileset dir="${core.src.dir}" includes="core-default.xml"/>
- </copy>
-
- </target>
-<!--
- <target name="compile-mapred-classes" depends="compile-core-classes">
- <jsp-compile
- uriroot="${src.webapps}/task"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/task/WEB-INF/web.xml">
- </jsp-compile>
-
- <jsp-compile
- uriroot="${src.webapps}/job"
- outputdir="${build.src}"
- package="org.apache.hadoop.mapred"
- webxml="${build.webapps}/job/WEB-INF/web.xml">
- </jsp-compile>
- <javac
- encoding="${build.encoding}"
- srcdir="${mapred.src.dir};${build.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.classes}">
- <fileset dir="${mapred.src.dir}" includes="**/*.properties"/>
- <fileset dir="${mapred.src.dir}" includes="mapred-default.xml"/>
- </copy>
- </target>
--->
- <!--<target name="compile-hdfs-classes" depends="compile-core-classes">-->
- <target name="compile-hdfs-classes" depends="init, compile-rcc-compiler">
+ <target name="compile-hdfs-classes" depends="init">
<taskdef classname="org.apache.jasper.JspC" name="jsp-compile" >
<classpath refid="classpath"/>
</taskdef>
@@ -393,78 +301,8 @@
<fileset dir="${hdfs.src.dir}" includes="hdfs-default.xml"/>
</copy>
</target>
-<!--
- <target name="compile-tools" depends="init">
- <javac
- encoding="${build.encoding}"
- srcdir="${tools.src}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.tools}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="classpath"/>
- </javac>
-
- <copy todir="${build.tools}">
- <fileset
- dir="${tools.src}"
- includes="**/*.properties"
- />
- </copy>
- </target>
-
- <target name="compile-native">
- <antcall target="compile-core-native">
- <param name="compile.native" value="true"/>
- </antcall>
- </target>
-
- <target name="compile-core-native" depends="compile-core-classes"
- if="compile.native">
-
- <mkdir dir="${build.native}/lib"/>
- <mkdir dir="${build.native}/src/org/apache/hadoop/io/compress/zlib"/>
-
- <javah
- classpath="${build.classes}"
- destdir="${build.native}/src/org/apache/hadoop/io/compress/zlib"
- force="yes"
- verbose="yes"
- >
- <class name="org.apache.hadoop.io.compress.zlib.ZlibCompressor" />
- <class name="org.apache.hadoop.io.compress.zlib.ZlibDecompressor" />
- </javah>
-
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- <arg line="${native.src.dir}/configure"/>
- </exec>
-
- <exec dir="${build.native}" executable="${make.cmd}" failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_DATA_MODEL" value="${sun.arch.data.model}"/>
- <env key="HADOOP_NATIVE_SRCDIR" value="${native.src.dir}"/>
- </exec>
-
- <exec dir="${build.native}" executable="sh" failonerror="true">
- <arg line="${build.native}/libtool mode=install cp
${build.native}/lib/libhadoop.la ${build.native}/lib"/>
- </exec>
- </target>
--->
- <target name="compile-core"
- depends="clover,
- compile-hdfs-classes"
- description="Compile core only">
- </target>
+ <target name="compile-core" depends="clover, compile-hdfs-classes"
description="Compile"/>
<target name="compile-contrib" depends="compile-core">
<subant target="compile">
@@ -473,38 +311,14 @@
</subant>
</target>
- <target name="compile" depends="compile-core, compile-contrib,
compile-ant-tasks" description="Compile core, contrib">
- </target>
-
- <target name="compile-examples"
- depends="compile-core">
- <javac
- encoding="${build.encoding}"
- srcdir="${examples.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${build.examples}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath>
- <path refid="classpath"/>
- <pathelement location="${build.tools}"/>
- </classpath>
- </javac>
- </target>
+ <target name="compile" depends="compile-core, compile-contrib,
compile-ant-tasks" description="Compile hdfs and contrib" />
<!-- ================================================================== -->
- <!-- Make hadoop.jar -->
+ <!-- Make hadoop-hdfs.jar -->
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
<target name="jar" depends="compile-core" description="Make hadoop.jar">
- <!-- <tar compression="gzip" destfile="${build.classes}/bin.tgz">
- <tarfileset dir="bin" mode="755"/>
- </tar> -->
<jar jarfile="${hadoop-hdfs.jar}"
basedir="${build.classes}">
<manifest>
@@ -522,79 +336,9 @@
</target>
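
With the commented-out bin.tgz packaging removed, the jar target reduces to jarring ${build.classes} straight into ${hadoop-hdfs.jar}, i.e. ${build.dir}/${final.name}.jar per the property defined alongside the Ivy settings above — with the same hypothetical version:

  build/hadoop-hdfs-0.21.0-dev.jar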
<!-- ================================================================== -->
- <!-- Make the Hadoop examples jar. -->
- <!-- ================================================================== -->
- <!-- -->
- <!-- ================================================================== -->
-<!--
- <target name="examples" depends="jar, compile-examples" description="Make
the Hadoop examples jar.">
- <jar jarfile="${build.dir}/${final.name}-examples.jar"
- basedir="${build.examples}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
-
- <target name="tools-jar" depends="jar, compile-tools"
- description="Make the Hadoop tools jar.">
- <jar jarfile="${build.dir}/${final.name}-tools.jar"
- basedir="${build.tools}">
- <manifest>
- <attribute name="Main-Class"
- value="org/apache/hadoop/examples/ExampleDriver"/>
- </manifest>
- </jar>
- </target>
-
- <target name="generate-test-records" depends="compile-rcc-compiler">
- <recordcc destdir="${test.generated.dir}">
- <fileset dir="${test.src.dir}"
- includes="**/*.jr" />
- </recordcc>
- </target>
--->
- <!-- ================================================================== -->
<!-- Compile test code -->
<!-- ================================================================== -->
-<!--
- <target name="compile-core-test" depends="compile-core-classes,
generate-test-records">
- <mkdir dir="${test.core.build.classes}"/>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.generated.dir}"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.core.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args}" />
- <classpath refid="test.classpath"/>
- </javac>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/core"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.core.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
-
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <copy file="${test.src.dir}/core/org/apache/hadoop/cli/testConf.xml"
todir="${test.cache.data}"/>
- </target>
--->
-
<target name="compile-hdfs-test" depends="compile-hdfs-classes,
ivy-retrieve-test">
<mkdir dir="${test.hdfs.build.classes}"/>
<javac
@@ -623,79 +367,7 @@
<copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir="${test.cache.data}"/>
<copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
</target>
-<!--
- <target name="compile-mapred-test" depends="compile-examples,
compile-hdfs-test">
-
- <mkdir dir="${test.mapred.build.classes}"/>
- <mkdir dir="${test.mapred.build.testjar}"/>
- <mkdir dir="${test.mapred.build.testshell}"/>
-
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/mapred"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.mapred.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.mapred.classpath"/>
- </javac>
-
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/mapred/testjar"
- includes="*.java"
- destdir="${test.mapred.build.testjar}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.mapred.classpath"/>
- </javac>
-
- <delete file="${test.mapred.build.testjar}/testjob.jar"/>
- <jar jarfile="${test.mapred.build.testjar}/testjob.jar"
- basedir="${test.mapred.build.testjar}">
- </jar>
-
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/mapred/testshell"
- includes="*.java"
- destdir="${test.mapred.build.testshell}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}"/>
- <classpath refid="test.mapred.classpath"/>
- </javac>
- <delete file="${test.mapred.build.testshell}/testshell.jar"/>
- <jar jarfile="${test.mapred.build.testshell}/testshell.jar"
- basedir="${test.mapred.build.testshell}">
- </jar>
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <delete dir="${test.debug.data}"/>
- <mkdir dir="${test.debug.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/testMRConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
- </target>
--->
<target name="compile-hdfs-with-mr-test" depends="compile-hdfs-test">
<mkdir dir="${test.hdfs.with.mr.build.classes}"/>
<javac
@@ -784,11 +456,6 @@
<sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
<sysproperty key="java.library.path"
value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
- <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++"/>
- </syspropertyset>
<classpath refid="test.classpath"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${test.build.dir}" unless="testcase">
@@ -804,7 +471,6 @@
</target>
<target name="run-test-hdfs-with-mr" depends="compile-hdfs-with-mr-test"
description="Run hdfs unit tests that require mapred">
-
<delete dir="${test.build.data}"/>
<mkdir dir="${test.build.data}"/>
<delete dir="${test.log.dir}"/>
@@ -828,11 +494,6 @@
<sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
<sysproperty key="java.library.path"
value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
- <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
- <!-- set compile.c++ in the child jvm only if it is set -->
- <syspropertyset dynamic="no">
- <propertyref name="compile.c++"/>
- </syspropertyset>
<classpath refid="test.hdfs.with.mr.classpath"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${test.build.dir}" unless="testcase">
@@ -860,7 +521,7 @@
</subant>
</target>
- <target name="test-core" description="Run core, hdfs and mapred unit tests">
+ <target name="test-core" description="Run hdfs, hdfs with mapred unit tests">
<delete file="${test.build.dir}/testsfailed"/>
<property name="continueOnFailure" value="true"/>
<antcall target="run-test-hdfs"/>
@@ -1059,7 +720,7 @@
<doclet name="jdiff.JDiff"
path="${jdiff.jar}:${xerces.jar}">
<param name="-apidir" value="${jdiff.xml.dir}"/>
- <param name="-apiname" value="${ant.project.name} ${version}"/>
+ <param name="-apiname" value="hadoop-hdfs ${version}"/>
</doclet>
<packageset dir="src/java"/>
<classpath >
@@ -1084,8 +745,8 @@
maxmemory="${javadoc.maxmemory}">
<doclet name="jdiff.JDiff"
path="${jdiff.jar}:${xerces.jar}">
- <param name="-oldapi" value="${ant.project.name} ${jdiff.stable}"/>
- <param name="-newapi" value="${ant.project.name} ${version}"/>
+ <param name="-oldapi" value="hadoop-hdfs ${jdiff.stable}"/>
+ <param name="-newapi" value="hadoop-hdfs ${version}"/>
<param name="-oldapidir" value="${jdiff.xml.dir}"/>
<param name="-newapidir" value="${jdiff.xml.dir}"/>
<param name="-javadocold" value="${jdiff.stable.javadoc}"/>
@@ -1135,13 +796,6 @@
</fileset>
</copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
-
<subant target="package">
<!--Pass down the version in case its needed again and the target
distribution directory so contribs know where to install to.-->
@@ -1158,10 +812,6 @@
<fileset file="${build.dir}/${final.name}-*.jar"/>
</copy>
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
-
<copy todir="${dist.dir}/conf">
<fileset dir="${conf.dir}" excludes="**/*.template"/>
</copy>
@@ -1188,13 +838,6 @@
<copy todir="${dist.dir}/" file="build.xml"/>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/src/contrib/">
- <include name="*/bin/*" />
- </fileset>
- <fileset dir="${dist.dir}/src/contrib/ec2/bin/image"/>
- </chmod>
-
</target>
<!-- ================================================================== -->
@@ -1237,13 +880,6 @@
</fileset>
</copy>
- <exec dir="${dist.dir}" executable="sh" failonerror="true">
- <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
- <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
- <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
- <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
- </exec>
-
<subant target="package">
<!--Pass down the version in case its needed again and the target
distribution directory so contribs know where to install to.-->
@@ -1259,10 +895,6 @@
<copy todir="${dist.dir}">
<fileset file="${build.dir}/${final.name}-*.jar"/>
</copy>
-
- <copy todir="${dist.dir}/bin">
- <fileset dir="bin"/>
- </copy>
<copy todir="${dist.dir}/conf">
<fileset dir="${conf.dir}" excludes="**/*.template"/>
@@ -1280,15 +912,8 @@
</fileset>
</copy>
- <copy todir="${dist.dir}/c++" includeEmptyDirs="false">
- <fileset dir="${build.dir}/c++"/>
- </copy>
-
<copy todir="${dist.dir}/" file="build.xml"/>
- <chmod perm="ugo+x" type="file" parallel="false">
- <fileset dir="${dist.dir}/bin"/>
- </chmod>
</target>
<target name="binary" depends="bin-package" description="Make tarball
without source and documentation">
@@ -1300,9 +925,6 @@
<exclude name="${final.name}/docs/**" />
<include name="${final.name}/**" />
</tarfileset>
- <tarfileset dir="${build.dir}" mode="755">
- <include name="${final.name}/bin/*" />
- </tarfileset>
</param.listofitems>
</macro_tar>
</target>
@@ -1337,204 +959,6 @@
</subant>
</target>
- <target name="test-c++-libhdfs" depends="compile-c++-libhdfs, compile-core"
if="islibhdfs">
- <delete dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}"/>
- <mkdir dir="${test.libhdfs.dir}/logs"/>
- <mkdir dir="${test.libhdfs.dir}/hdfs/name"/>
-
- <exec dir="${build.c++.libhdfs}" executable="${make.cmd}"
failonerror="true">
- <env key="OS_NAME" value="${os.name}"/>
- <env key="OS_ARCH" value="${os.arch}"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="HADOOP_CONF_DIR" value="${test.libhdfs.conf.dir}"/>
- <env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
- <env key="LIBHDFS_SRC_DIR" value="${c++.libhdfs.src}"/>
- <env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>
- <env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
- <arg value="test"/>
- </exec>
- </target>
-
-<!-- ================================================================== -->
-<!-- librecordio targets. -->
-<!-- ================================================================== -->
-
- <target name="compile-librecordio" depends="init" if="librecordio" >
- <mkdir dir="${build.librecordio}"/>
- <exec dir="${librecordio.src}" executable="${make.cmd}"
failonerror="true">
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- </exec>
- </target>
-
- <target name="test-librecordio" depends="compile-librecordio, compile-core"
if="librecordio">
- <delete dir="${librecordio.test.dir}"/>
- <mkdir dir="${librecordio.test.dir}"/>
- <exec dir="${librecordio.src}/test" executable="${make.cmd}"
failonerror="true">
- <env key="HADOOP_HOME" value="${basedir}"/>
- <env key="XERCESCROOT" value="${xercescroot}"/>
- <env key="LIBRECORDIO_BUILD_DIR" value="${build.librecordio}"/>
- <env key="LIBRECORDIO_TEST_DIR" value="${librecordio.test.dir}"/>
- <arg value="all"/>
- </exec>
- </target>
-
- <target name="package-librecordio" depends="compile-librecordio"
if="librecordio">
- <mkdir dir="${dist.dir}/librecordio"/>
- <copy todir="${dist.dir}/librecordio">
- <fileset dir="${build.librecordio}" casesensitive="yes"
followsymlinks="false">
- <exclude name="**/tests/**"/>
- <exclude name="*.so"/>
- <exclude name="*.o"/>
- </fileset>
- </copy>
- <chmod perm="ugo+x" type="file">
- <fileset dir="${dist.dir}/librecordio"/>
- </chmod>
- </target>
-
- <target name="create-c++-configure" depends="init" if="compile.c++">
- <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.pipes.src}" searchpath="yes"
- failonerror="yes">
- <arg value="-if"/>
- </exec>
- <exec executable="autoreconf" dir="${c++.examples.pipes.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- <antcall target="create-c++-configure-libhdfs"/>
- </target>
-
- <target name="create-c++-configure-libhdfs" depends="check-c++-libhdfs"
if="islibhdfs">
- <exec executable="autoreconf" dir="${c++.libhdfs.src}"
- searchpath="yes" failonerror="yes">
- <arg value="-if"/>
- </exec>
- </target>
-
- <target name="check-c++-makefiles" depends="init" if="compile.c++">
- <condition property="need.c++.utils.makefile">
- <not> <available file="${build.c++.utils}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.pipes.makefile">
- <not> <available file="${build.c++.pipes}/Makefile"/> </not>
- </condition>
- <condition property="need.c++.examples.pipes.makefile">
- <not> <available file="${build.c++.examples.pipes}/Makefile"/> </not>
- </condition>
- </target>
-
- <target name="check-c++-libhdfs">
- <condition property="islibhdfs">
- <and>
- <isset property="compile.c++"/>
- <isset property="libhdfs"/>
- </and>
- </condition>
- </target>
-
- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs"
if="islibhdfs">
- <condition property="need.c++.libhdfs.makefile">
- <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
- </condition>
- </target>
-
- <target name="create-c++-libhdfs-makefile"
depends="check-c++-makefile-libhdfs"
- if="need.c++.libhdfs.makefile">
- <mkdir dir="${build.c++.libhdfs}"/>
- <chmod file="${c++.libhdfs.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.libhdfs.src}/configure" dir="${build.c++.libhdfs}"
- failonerror="yes">
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
-
- <target name="create-c++-utils-makefile" depends="check-c++-makefiles"
- if="need.c++.utils.makefile">
- <mkdir dir="${build.c++.utils}"/>
- <chmod file="${c++.utils.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.utils.src}/configure" dir="${build.c++.utils}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
-
- <target name="compile-c++-utils" depends="create-c++-utils-makefile"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.utils}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
-
- <target name="create-c++-pipes-makefile" depends="check-c++-makefiles"
- if="need.c++.pipes.makefile">
- <mkdir dir="${build.c++.pipes}"/>
- <chmod file="${c++.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.pipes.src}/configure" dir="${build.c++.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++}"/>
- </exec>
- </target>
-
- <target name="compile-c++-pipes"
- depends="create-c++-pipes-makefile,compile-c++-utils"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.pipes}" searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
-
- <target name="compile-c++"
- depends="compile-c++-pipes"/>
-
- <target name="create-c++-examples-pipes-makefile"
- depends="check-c++-makefiles"
- if="need.c++.examples.pipes.makefile">
- <mkdir dir="${build.c++.examples.pipes}"/>
- <chmod file="${c++.examples.pipes.src}/configure" perm="ugo+x"/>
- <exec executable="${c++.examples.pipes.src}/configure"
- dir="${build.c++.examples.pipes}"
- failonerror="yes">
- <arg value="--prefix=${install.c++.examples}"/>
- <arg value="--with-hadoop-utils=${install.c++}"/>
- <arg value="--with-hadoop-pipes=${install.c++}"/>
- </exec>
- </target>
-
- <target name="compile-c++-examples-pipes"
- depends="create-c++-examples-pipes-makefile,compile-c++-pipes"
- if="compile.c++">
- <exec executable="${make.cmd}" dir="${build.c++.examples.pipes}"
searchpath="yes"
- failonerror="yes">
- <arg value="install"/>
- </exec>
- </target>
-
- <target name="compile-c++-examples"
- depends="compile-c++-examples-pipes"/>
-
- <target name="compile-c++-libhdfs" depends="create-c++-libhdfs-makefile"
if="islibhdfs">
- <exec executable="${make.cmd}" dir="${build.c++.libhdfs}" searchpath="yes"
- failonerror="yes">
- <env key="ac_cv_func_malloc_0_nonnull" value="yes"/>
- <env key="JVM_ARCH" value="${jvm.arch}"/>
- <arg value="install"/>
- </exec>
- </target>
-
-
-
<target name="compile-ant-tasks" depends="compile-core">
<javac
encoding="${build.encoding}"
@@ -1554,20 +978,18 @@
<target name="ant-tasks" depends="jar, compile-ant-tasks">
<copy file="${anttasks.dir}/org/apache/hadoop/ant/antlib.xml"
todir="${build.anttasks}/org/apache/hadoop/ant"/>
- <jar destfile="${build.dir}/${final.name}-ant.jar">
+ <jar destfile="${build.dir}/${ant.final.name}.jar">
<fileset dir="${build.anttasks}"/>
</jar>
</target>
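
A hedged sketch of consuming the renamed ant-tasks jar from another build file (the antlib resource path is taken from the copy above; the task names it defines are not shown in this diff, and the jar name assumes the hypothetical 0.21.0-dev version):

  <taskdef resource="org/apache/hadoop/ant/antlib.xml">
    <classpath>
      <pathelement location="build/hadoop-hdfs-ant-0.21.0-dev.jar"/>
    </classpath>
  </taskdef>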
-
-
<target name="clover" depends="clover.setup, clover.info"
description="Instrument the Unit tests using Clover. To use, specify
-Dclover.home=<base of clover installation> -Drun.clover=true on the
command line."/>
<target name="clover.setup" if="clover.enabled">
<taskdef resource="cloverlib.xml" classpath="${clover.jar}"/>
<mkdir dir="${clover.db.dir}"/>
<clover-setup initString="${clover.db.dir}/hadoop_coverage.db">
- <fileset dir="src" includes="core/**/* tools/**/* hdfs/**/* mapred/**/*"/>
+ <fileset dir="src" includes="java/**/*"/>
</clover-setup>
</target>
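
The narrower Clover fileset matches the branch layout: all HDFS sources on this branch live under src/java (the same tree the JDiff packageset above points at), so the old core/tools/hdfs/mapred include list would instrument nothing that still exists here.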
@@ -1773,7 +1195,7 @@
description="Retrieve Ivy-managed artifacts for the test configurations">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"/>
- <ivy:cachepath pathid="ivy.test.classpath" conf="test"/>
+ <ivy:cachepath pathid="ivy-test.classpath" conf="test"/>
</target>
<target name="ivy-retrieve-common" depends="ivy-resolve-common"
@@ -1804,10 +1226,10 @@
<fail>
<condition >
<not>
- <available file="${hadoop.jar}" />
+ <available file="${hadoop-hdfs.jar}" />
</not>
</condition>
- Not found: ${hadoop.jar}
+ Not found: ${hadoop-hdfs.jar}
Please run the target "jar" in the main build file
</fail>
@@ -1840,7 +1262,7 @@
<target name="copy-jar-to-maven" depends="ready-to-publish">
- <copy file="${hadoop.jar}"
+ <copy file="${hadoop-hdfs.jar}"
tofile="${build.ivy.maven.jar}"/>
<checksum file="${build.ivy.maven.jar}" algorithm="md5"/>
</target>
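
After the rename, copy-jar-to-maven stages the build output under its Maven coordinates. With the properties from the top of this diff (and the same hypothetical version, with build.ivy.dir assumed to be build/ivy), the copy amounts to:

  build/hadoop-hdfs-0.21.0-dev.jar  ->  build/ivy/maven/hadoop-hdfs-0.21.0-dev.jar (+ .md5 checksum)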
@@ -1855,7 +1277,7 @@
</copy>
</presetdef>
- <expandingcopy file="ivy/hadoop-core.pom"
+ <expandingcopy file="ivy/hadoop-hdfs.pom"
tofile="${build.ivy.maven.pom}"/>
<checksum file="${build.ivy.maven.pom}" algorithm="md5"/>
</target>
@@ -1866,52 +1288,4 @@
</target>
- <!-- taskcontroller targets -->
- <target name="init-task-controller-build">
- <mkdir dir="${build.c++.task-controller}" />
- <copy todir="${build.c++.task-controller}">
- <fileset dir="${c++.task-controller.src}" includes="*.c">
- </fileset>
- <fileset dir="${c++.task-controller.src}" includes="*.h">
- </fileset>
- </copy>
- <chmod file="${c++.task-controller.src}/configure" perm="ugo+x"/>
- <condition property="task-controller.conf.dir.passed">
- <not>
- <equals arg1="${hadoop.conf.dir}" arg2="$${hadoop.conf.dir}"/>
- </not>
- </condition>
- </target>
- <target name="configure-task-controller" depends="init,
- init-task-controller-build,
- task-controller-configuration-with-confdir,
- task-controller-configuration-with-no-confdir">
- </target>
- <target name="task-controller-configuration-with-confdir"
- if="task-controller.conf.dir.passed" >
- <exec executable="${c++.task-controller.src}/configure"
- dir="${build.c++.task-controller}" failonerror="yes">
- <arg value="--prefix=${task-controller.install.dir}" />
- <arg value="--with-confdir=${hadoop.conf.dir}" />
- </exec>
- </target>
- <target name="task-controller-configuration-with-no-confdir"
- unless="task-controller.conf.dir.passed">
- <exec executable="${c++.task-controller.src}/configure"
- dir="${build.c++.task-controller}" failonerror="yes">
- <arg value="--prefix=${task-controller.install.dir}" />
- </exec>
- </target>
- <!--
- * Create the installation directory.
- * Do a make install.
- -->
- <target name="task-controller" depends="configure-task-controller">
- <mkdir dir="${task-controller.install.dir}" />
- <exec executable="${make.cmd}" dir="${build.c++.task-controller}"
- searchpath="yes" failonerror="yes">
- <arg value="install" />
- </exec>
- </target>
- <!-- end of task-controller target -->
</project>