Author: omalley
Date: Fri Jun 6 17:05:57 2008
New Revision: 664207
URL: http://svn.apache.org/viewvc?rev=664207&view=rev
Log:
HADOOP-3512. Separate out the tools into a tools jar.
Added:
hadoop/core/trunk/src/tools/
hadoop/core/trunk/src/tools/org/
hadoop/core/trunk/src/tools/org/apache/
hadoop/core/trunk/src/tools/org/apache/hadoop/
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java
- copied, changed from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp_Counter.properties
- copied unchanged from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles_Counter.properties
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java
- copied, changed from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/util/HadoopArchives.java
hadoop/core/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java
- copied, changed from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java
Removed:
hadoop/core/trunk/src/java/org/apache/hadoop/tools/
hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles_Counter.properties
hadoop/core/trunk/src/java/org/apache/hadoop/util/HadoopArchives.java
Modified:
hadoop/core/trunk/CHANGES.txt
hadoop/core/trunk/bin/hadoop
hadoop/core/trunk/build.xml
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
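The user-visible effect of the move is a rename: org.apache.hadoop.util.CopyFiles becomes org.apache.hadoop.tools.DistCp, and org.apache.hadoop.util.HadoopArchives becomes org.apache.hadoop.tools.HadoopArchives, with both now shipped in a separate hadoop-*-tools.jar instead of the core jar. For code that drove the copy tool through its Java API, the change is an import and a constructor. A minimal sketch (the driver class name and file:/// URIs are illustrative; the ToolRunner pattern mirrors the updated tests below):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;     // was org.apache.hadoop.util.CopyFiles
    import org.apache.hadoop.util.ToolRunner;

    public class CopyDriver {
      public static void main(String[] args) throws Exception {
        // DistCp still implements Tool, so ToolRunner handles the generic options.
        int rc = ToolRunner.run(new DistCp(new Configuration()),
            new String[] {"file:///tmp/srcdat", "file:///tmp/destdat"});
        System.exit(rc);
      }
    }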
Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=664207&r1=664206&r2=664207&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Fri Jun 6 17:05:57 2008
@@ -86,6 +86,8 @@
block in the log. Also include a CorruptedBlocks metric to track the size
of the corrupted block map. (cdouglas)
+ HADOOP-3512. Separate out the tools into a tools jar. (omalley)
+
NEW FEATURES
HADOOP-3074. Provides a UrlStreamHandler for DFS and other FS,
Modified: hadoop/core/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/bin/hadoop?rev=664207&r1=664206&r2=664207&view=diff
==============================================================================
--- hadoop/core/trunk/bin/hadoop (original)
+++ hadoop/core/trunk/bin/hadoop Fri Jun 6 17:05:57 2008
@@ -141,6 +141,13 @@
CLASSPATH=${CLASSPATH}:$f;
done
+for f in $HADOOP_HOME/hadoop-*-tools.jar; do
+ TOOL_PATH=${TOOL_PATH}:$f;
+done
+for f in $HADOOP_HOME/build/hadoop-*-tools.jar; do
+ TOOL_PATH=${TOOL_PATH}:$f;
+done
+
# add user-specified CLASSPATH last
if [ "$HADOOP_CLASSPATH" != "" ]; then
CLASSPATH=${CLASSPATH}:${HADOOP_CLASSPATH}
@@ -159,6 +166,7 @@
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
HADOOP_HOME=`cygpath -d "$HADOOP_HOME"`
HADOOP_LOG_DIR=`cygpath -d "$HADOOP_LOG_DIR"`
+ TOOL_PATH=`cygpath -d "$TOOL_PATH"`
fi
# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
@@ -228,13 +236,15 @@
elif [ "$COMMAND" = "jar" ] ; then
CLASS=org.apache.hadoop.util.RunJar
elif [ "$COMMAND" = "distcp" ] ; then
- CLASS=org.apache.hadoop.util.CopyFiles
+ CLASS=org.apache.hadoop.tools.DistCp
+ CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "daemonlog" ] ; then
CLASS=org.apache.hadoop.log.LogLevel
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "archive" ] ; then
- CLASS=org.apache.hadoop.util.HadoopArchives
+ CLASS=org.apache.hadoop.tools.HadoopArchives
+ CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
else
CLASS=$COMMAND
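Note that TOOL_PATH is appended to CLASSPATH only for the distcp and archive commands; no other invocation of bin/hadoop sees the tools classes at all. A quick way to check whether a given classpath includes the tools jar (hedged sketch; the probe class is illustrative, the two class names are the ones introduced in this change):

    // Throws ClassNotFoundException unless hadoop-*-tools.jar is on the
    // classpath; the core jar alone no longer contains these classes.
    public class ToolsProbe {
      public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hadoop.tools.DistCp");
        Class.forName("org.apache.hadoop.tools.HadoopArchives");
        System.out.println("tools jar present");
      }
    }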
Modified: hadoop/core/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/build.xml?rev=664207&r1=664206&r2=664207&view=diff
==============================================================================
--- hadoop/core/trunk/build.xml (original)
+++ hadoop/core/trunk/build.xml Fri Jun 6 17:05:57 2008
@@ -46,10 +46,12 @@
<property name="c++.pipes.src" value="${c++.src}/pipes"/>
<property name="c++.examples.pipes.src" value="${examples.dir}/pipes"/>
<property name="libhdfs.src" value="${c++.src}/libhdfs"/>
+ <property name="tools.src" value="${basedir}/src/tools"/>
<property name="build.dir" value="${basedir}/build"/>
<property name="build.classes" value="${build.dir}/classes"/>
<property name="build.src" value="${build.dir}/src"/>
+ <property name="build.tools" value="${build.dir}/tools"/>
<property name="build.webapps" value="${build.dir}/webapps"/>
<property name="build.examples" value="${build.dir}/examples"/>
<property name="build.anttasks" value="${build.dir}/ant"/>
@@ -149,6 +151,7 @@
<pathelement location="${test.src.dir}"/>
<pathelement location="${build.dir}"/>
<pathelement location="${build.examples}"/>
+ <pathelement location="${build.tools}"/>
<fileset dir="${test.lib.dir}">
<include name="**/*.jar" />
<exclude name="**/excluded/" />
@@ -187,6 +190,7 @@
<target name="init">
<mkdir dir="${build.dir}"/>
<mkdir dir="${build.classes}"/>
+ <mkdir dir="${build.tools}"/>
<mkdir dir="${build.src}"/>
<mkdir dir="${build.webapps}/task/WEB-INF"/>
<mkdir dir="${build.webapps}/job/WEB-INF"/>
@@ -320,6 +324,29 @@
</target>
+ <target name="compile-tools" depends="init">
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${tools.src}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${build.tools}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="classpath"/>
+ </javac>
+
+ <copy todir="${build.tools}">
+ <fileset
+ dir="${tools.src}"
+ includes="**/*.properties"
+ />
+ </copy>
+ </target>
+
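The <copy> of **/*.properties is load-bearing: the distcp counter display names live in DistCp_Counter.properties, which must land in build/tools beside DistCp.class so it resolves as a resource bundle at runtime. A hedged sketch of that lookup, assuming the standard java.util.ResourceBundle mechanism Hadoop counters use (the class name is illustrative):

    import java.util.Enumeration;
    import java.util.ResourceBundle;

    public class CounterNames {
      public static void main(String[] args) {
        // Resolves org/apache/hadoop/tools/DistCp_Counter.properties from
        // the classpath, i.e. from the tools jar after this change.
        ResourceBundle bundle =
            ResourceBundle.getBundle("org.apache.hadoop.tools.DistCp_Counter");
        for (Enumeration<String> keys = bundle.getKeys(); keys.hasMoreElements();) {
          String key = keys.nextElement();
          System.out.println(key + " = " + bundle.getString(key));
        }
      }
    }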
<target name="compile-core-native" depends="compile-core-classes"
if="compile.native">
@@ -379,7 +406,7 @@
</subant>
</target>
- <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks" description="Compile core, contrib">
+ <target name="compile" depends="compile-core, compile-contrib, compile-ant-tasks, compile-tools" description="Compile core, contrib">
</target>
<target name="compile-examples"
@@ -440,6 +467,17 @@
</jar>
</target>
+ <target name="tools-jar" depends="jar, compile-tools"
+ description="Make the Hadoop tools jar.">
+ <jar jarfile="${build.dir}/${final.name}-tools.jar"
+ basedir="${build.tools}">
+ <manifest>
+ <attribute name="Main-Class"
+ value="org/apache/hadoop/examples/ExampleDriver"/>
+ </manifest>
+ </jar>
+ </target>
+
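Running "ant tools-jar" now produces build/${final.name}-tools.jar, which the new globs in bin/hadoop pick up via TOOL_PATH. To see what actually moved out of the core jar, listing the entries is enough (hedged sketch; the class name is illustrative, and the path of the built jar is passed as the argument):

    import java.util.Enumeration;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public class ListToolsJar {
      public static void main(String[] args) throws Exception {
        // Expect DistCp, HadoopArchives, Logalyzer and DistCp_Counter.properties
        // under org/apache/hadoop/tools/.
        JarFile jar = new JarFile(args[0]);
        for (Enumeration<JarEntry> entries = jar.entries(); entries.hasMoreElements();) {
          System.out.println(entries.nextElement().getName());
        }
        jar.close();
      }
    }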
<!-- ================================================================== -->
<!-- Make the Hadoop metrics jar. (for use outside Hadoop) -->
<!-- ================================================================== -->
@@ -463,7 +501,7 @@
<!-- ================================================================== -->
<!-- Compile test code -->
<!-- ================================================================== -->
- <target name="compile-core-test" depends="compile-examples, generate-test-records">
+ <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
<javac
encoding="${build.encoding}"
srcdir="${test.generated.dir}"
@@ -764,7 +802,7 @@
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
- <target name="package" depends="compile, jar, javadoc, examples, jar-test, ant-tasks, package-libhdfs"
+ <target name="package" depends="compile, jar, javadoc, examples, tools-jar, jar-test, ant-tasks, package-libhdfs"
description="Build distribution">
<mkdir dir="${dist.dir}"/>
<mkdir dir="${dist.dir}/lib"/>
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java?rev=664207&r1=664206&r2=664207&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java Fri Jun 6 17:05:57 2008
@@ -29,7 +29,7 @@
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.util.CopyFiles;
+import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.util.ToolRunner;
@@ -224,7 +224,7 @@
/** copy files from local file system to local file system */
public void testCopyFromLocalToLocal() throws Exception {
MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
"file:///"+TEST_ROOT_DIR+"/destdat"});
assertTrue("Source and destination directories do not match.",
@@ -243,7 +243,7 @@
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-log",
namenode+"/logs",
namenode+"/srcdat",
@@ -272,7 +272,7 @@
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-log",
namenode+"/logs",
"file:///"+TEST_ROOT_DIR+"/srcdat",
@@ -301,7 +301,7 @@
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-log",
"/logs",
namenode+"/srcdat",
@@ -329,7 +329,7 @@
namenode = FileSystem.getDefaultUri(conf).toString();
if (namenode.startsWith("hdfs://")) {
MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-p",
"-log",
namenode+"/logs",
@@ -346,7 +346,7 @@
updateFiles(namenode, "/srcdat", files, nupdate);
deldir(namenode, "/logs");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-p",
"-update",
"-log",
@@ -359,7 +359,7 @@
checkUpdate(dchkpoint, namenode, "/destdat", files, nupdate));
deldir(namenode, "/logs");
- ToolRunner.run(new CopyFiles(conf), new String[] {
+ ToolRunner.run(new DistCp(conf), new String[] {
"-p",
"-overwrite",
"-log",
@@ -383,14 +383,14 @@
public void testCopyDuplication() throws Exception {
try {
MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
"file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
assertTrue("Source and destination directories do not match.",
checkFiles("file:///", TEST_ROOT_DIR+"/src2/srcdat", files));
- assertEquals(CopyFiles.DuplicationException.ERROR_CODE,
- ToolRunner.run(new CopyFiles(new Configuration()),
+ assertEquals(DistCp.DuplicationException.ERROR_CODE,
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
"file:///"+TEST_ROOT_DIR+"/src2/srcdat",
"file:///"+TEST_ROOT_DIR+"/destdat",}));
@@ -408,7 +408,7 @@
try {
MyFile[] files = {createFile(root, fs)};
//copy a dir with a single file
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
"file:///"+TEST_ROOT_DIR+"/destdat"});
assertTrue("Source and destination directories do not match.",
@@ -418,7 +418,7 @@
String fname = files[0].getName();
Path p = new Path(root, fname);
FileSystem.LOG.info("fname=" + fname + ", exists? " + fs.exists(p));
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
"file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
assertTrue("Source and destination directories do not match.",
@@ -428,7 +428,7 @@
fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
MyFile[] files2 = {createFile(root, fs, 0)};
String sname = files2[0].getName();
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"-update",
"file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
"file:///"+TEST_ROOT_DIR+"/dest2/"});
@@ -436,7 +436,7 @@
checkFiles("file:///", TEST_ROOT_DIR+"/dest2", files2));
updateFiles("file:///", TEST_ROOT_DIR+"/srcdat", files2, 1);
//copy single file to existing dir w/ dst name conflict
- ToolRunner.run(new CopyFiles(new Configuration()),
+ ToolRunner.run(new DistCp(new Configuration()),
new String[] {"-update",
"file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
"file:///"+TEST_ROOT_DIR+"/dest2/"});
@@ -464,7 +464,7 @@
for(int i = 0; i < srcstat.length; i++) {
fs.setOwner(srcstat[i].getPath(), "u" + i, null);
}
- ToolRunner.run(new CopyFiles(conf),
+ ToolRunner.run(new DistCp(conf),
new String[]{"-pu", nnUri+"/srcdat", nnUri+"/destdat"});
assertTrue("Source and destination directories do not match.",
checkFiles(nnUri, "/destdat", files));
@@ -483,7 +483,7 @@
for(int i = 0; i < srcstat.length; i++) {
fs.setOwner(srcstat[i].getPath(), null, "g" + i);
}
- ToolRunner.run(new CopyFiles(conf),
+ ToolRunner.run(new DistCp(conf),
new String[]{"-pg", nnUri+"/srcdat", nnUri+"/destdat"});
assertTrue("Source and destination directories do not match.",
checkFiles(nnUri, "/destdat", files));
@@ -505,7 +505,7 @@
fs.setPermission(srcstat[i].getPath(), permissions[i]);
}
- ToolRunner.run(new CopyFiles(conf),
+ ToolRunner.run(new DistCp(conf),
new String[]{"-pp", nnUri+"/srcdat", nnUri+"/destdat"});
assertTrue("Source and destination directories do not match.",
checkFiles(nnUri, "/destdat", files));
@@ -539,7 +539,7 @@
}
JobConf job = mr.createJobConf();
job.setLong("distcp.bytes.per.map", totsize / 3);
- ToolRunner.run(new CopyFiles(job),
+ ToolRunner.run(new DistCp(job),
new String[] {"-m", "100",
"-log",
namenode+"/logs",
@@ -553,7 +553,7 @@
deldir(namenode, "/destdat");
deldir(namenode, "/logs");
- ToolRunner.run(new CopyFiles(job),
+ ToolRunner.run(new DistCp(job),
new String[] {"-m", "1",
"-log",
namenode+"/logs",
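The nested DuplicationException travels with the class, so callers that check its exit code follow the same rename. A short sketch built from the duplicate-source test above (the driver class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.DistCp;
    import org.apache.hadoop.util.ToolRunner;

    public class DupCheck {
      public static void main(String[] args) throws Exception {
        int rc = ToolRunner.run(new DistCp(new Configuration()), args);
        // Was CopyFiles.DuplicationException.ERROR_CODE before this change.
        if (rc == DistCp.DuplicationException.ERROR_CODE) {
          System.err.println("distcp was given duplicate source paths");
        }
        System.exit(rc);
      }
    }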
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java?rev=664207&r1=664206&r2=664207&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/fs/TestHarFileSystem.java Fri Jun 6 17:05:57 2008
@@ -41,7 +41,7 @@
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
-import org.apache.hadoop.util.HadoopArchives;
+import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.ToolRunner;
import junit.framework.TestCase;
@@ -196,4 +196,4 @@
assertTrue("number of bytes left should be -1", reduceIn.read(b) == -1);
reduceIn.close();
}
-}
\ No newline at end of file
+}
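Driving the archive tool from Java follows the same pattern as DistCp. A hedged sketch: the -archiveName flag and argument order are HadoopArchives' usual CLI convention, and the Configuration constructor is assumed to match DistCp's, neither being shown in this diff; the driver class name is illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.tools.HadoopArchives;  // was org.apache.hadoop.util
    import org.apache.hadoop.util.ToolRunner;

    public class ArchiveDriver {
      public static void main(String[] args) throws Exception {
        // HadoopArchives implements Tool (see the imports added to it below);
        // the Configuration constructor here is an assumption.
        int rc = ToolRunner.run(new HadoopArchives(new Configuration()),
            new String[] {"-archiveName", "logs.har", "/input", "/archives"});
        System.exit(rc);
      }
    }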
Copied: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java (from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java?p2=hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java&p1=hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java&r1=664114&r2=664207&rev=664207&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/DistCp.java Fri Jun 6 17:05:57 2008
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.util;
+package org.apache.hadoop.tools;
import java.io.BufferedReader;
import java.io.DataInput;
@@ -57,13 +57,16 @@
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileRecordReader;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
/**
* A Map-reduce program to recursively copy directories between
* different file-systems.
*/
-public class CopyFiles implements Tool {
- private static final Log LOG = LogFactory.getLog(CopyFiles.class);
+public class DistCp implements Tool {
+ private static final Log LOG = LogFactory.getLog(DistCp.class);
private static final String NAME = "distcp";
@@ -168,7 +171,7 @@
return conf;
}
- public CopyFiles(Configuration conf) {
+ public DistCp(Configuration conf) {
setConf(conf);
}
@@ -452,7 +455,7 @@
private void updatePermissions(FileStatus src, FileStatus dst
) throws IOException {
if (preserve_status) {
- CopyFiles.updatePermissions(src, dst, preseved, destFileSys);
+ DistCp.updatePermissions(src, dst, preseved, destFileSys);
}
}
@@ -776,8 +779,8 @@
}
public static void main(String[] args) throws Exception {
- JobConf job = new JobConf(CopyFiles.class);
- CopyFiles distcp = new CopyFiles(job);
+ JobConf job = new JobConf(DistCp.class);
+ DistCp distcp = new DistCp(job);
int res = ToolRunner.run(distcp, args);
System.exit(res);
}
@@ -839,7 +842,7 @@
//Job configuration
private static JobConf createJobConf(Configuration conf) {
- JobConf jobconf = new JobConf(conf, CopyFiles.class);
+ JobConf jobconf = new JobConf(conf, DistCp.class);
jobconf.setJobName(NAME);
// turn off speculative execution, because DFS doesn't handle
Copied: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java (from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/util/HadoopArchives.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java?p2=hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java&p1=hadoop/core/trunk/src/java/org/apache/hadoop/util/HadoopArchives.java&r1=664114&r2=664207&rev=664207&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/util/HadoopArchives.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/HadoopArchives.java Fri Jun 6 17:05:57 2008
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.util;
+package org.apache.hadoop.tools;
import java.io.FileNotFoundException;
import java.io.IOException;
@@ -56,7 +56,8 @@
import org.apache.hadoop.mapred.SequenceFileRecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
-
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
/**
* a archive creation utility.
@@ -310,7 +311,7 @@
Path outputPath = new Path(dest, archiveName);
FileOutputFormat.setOutputPath(conf, outputPath);
conf.set(DST_DIR_LABEL, outputPath.toString());
- final String randomId = CopyFiles.getRandomId();
+ final String randomId = DistCp.getRandomId();
Path jobDirectory = new Path(new JobClient().getSystemDir(),
NAME + "_" + randomId);
conf.set(JOB_DIR_LABEL, jobDirectory.toString());
Copied: hadoop/core/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java (from r664114, hadoop/core/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java?p2=hadoop/core/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java&p1=hadoop/core/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java&r1=664114&r2=664207&rev=664207&view=diff
==============================================================================
--- hadoop/core/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java (original)
+++ hadoop/core/trunk/src/tools/org/apache/hadoop/tools/Logalyzer.java Fri Jun 6 17:05:57 2008
@@ -46,7 +46,6 @@
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.lib.LongSumReducer;
-import org.apache.hadoop.util.CopyFiles;
/**
* Logalyzer: A utility tool for archiving and analyzing hadoop logs.
@@ -184,7 +183,7 @@
throws IOException
{
String destURL = FileSystem.getDefaultUri(fsConfig) + archiveDirectory;
- CopyFiles.copy(fsConfig, logListURI, destURL, null, true, false);
+ DistCp.copy(new JobConf(fsConfig), logListURI, destURL, null, true, false);
}
/**
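Logalyzer keeps using the copy tool through its static entry point; the call site now wraps the Configuration in a JobConf, which suggests DistCp.copy takes a JobConf where CopyFiles.copy took a Configuration. An equivalent standalone call (hedged sketch; the class name and URIs are placeholders, and the trailing arguments are passed through exactly as Logalyzer passes them):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.tools.DistCp;

    public class FetchLogs {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // null log path plus the same two boolean flags Logalyzer uses.
        DistCp.copy(new JobConf(conf), "hdfs://namenode/logs.list",
            "hdfs://namenode/archive", null, true, false);
      }
    }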