Author: todd
Date: Tue May 17 00:53:51 2011
New Revision: 1103968
URL: http://svn.apache.org/viewvc?rev=1103968&view=rev
Log:
HADOOP-7286. Refactor the du/dus/df commands to conform to new FsCommand class.
Contributed by Daryn Sharp.
Added:
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsUsage.java
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsShell.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Command.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsCommand.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Ls.java
hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Stat.java
hadoop/common/trunk/src/test/core/org/apache/hadoop/cli/testConf.xml
Modified: hadoop/common/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Tue May 17 00:53:51 2011
@@ -158,6 +158,9 @@ Trunk (unreleased changes)
HADOOP-7291. Update Hudson job not to run test-contrib. (Nigel Daley via
eli)
+ HADOOP-7286. Refactor the du/dus/df commands to conform to new FsCommand
+ class. (Daryn Sharp via todd)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsShell.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsShell.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/FsShell.java Tue May 17
00:53:51 2011
@@ -38,7 +38,6 @@ import org.apache.hadoop.fs.shell.Comman
import org.apache.hadoop.fs.shell.CommandFactory;
import org.apache.hadoop.fs.shell.CommandFormat;
import org.apache.hadoop.fs.shell.FsCommand;
-import org.apache.hadoop.fs.shell.PathData;
import org.apache.hadoop.fs.shell.PathExceptions.PathNotFoundException;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RPC;
@@ -64,7 +63,6 @@ public class FsShell extends Configured
static final String GET_SHORT_USAGE = "-get [-ignoreCrc] [-crc] <src>
<localdst>";
static final String COPYTOLOCAL_SHORT_USAGE = GET_SHORT_USAGE.replace(
"-get", "-copyToLocal");
- static final String DU_USAGE="-du [-s] [-h] <paths...>";
/**
*/
@@ -310,128 +308,6 @@ public class FsShell extends Configured
System.err.println("Option '-moveToLocal' is not implemented yet.");
}
- /**
- * Show the size of a partition in the filesystem that contains
- * the specified <i>path</i>.
- * @param path a path specifying the source partition. null means /.
- * @throws IOException
- */
- void df(String path) throws IOException {
- if (path == null) path = "/";
- final Path srcPath = new Path(path);
- final FileSystem srcFs = srcPath.getFileSystem(getConf());
- if (! srcFs.exists(srcPath)) {
- throw new PathNotFoundException(path);
- }
- final FsStatus stats = srcFs.getStatus(srcPath);
- final int PercentUsed = (int)(100.0f * (float)stats.getUsed() /
(float)stats.getCapacity());
- System.out.println("Filesystem\t\tSize\tUsed\tAvail\tUse%");
- System.out.printf("%s\t\t%d\t%d\t%d\t%d%%\n",
- path,
- stats.getCapacity(), stats.getUsed(), stats.getRemaining(),
- PercentUsed);
- }
-
- /**
- * Show the size of all files that match the file pattern <i>src</i>
- * @param cmd
- * @param pos ignore anything before this pos in cmd
- * @throws IOException
- * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
- */
- void du(String[] cmd, int pos) throws IOException {
- CommandFormat c = new CommandFormat(
- "du", 0, Integer.MAX_VALUE, "h", "s");
- List<String> params;
- try {
- params = c.parse(cmd, pos);
- } catch (IllegalArgumentException iae) {
- System.err.println("Usage: java FsShell " + DU_USAGE);
- throw iae;
- }
- boolean humanReadable = c.getOpt("h");
- boolean summary = c.getOpt("s");
-
- // Default to cwd
- if (params.isEmpty()) {
- params.add(".");
- }
-
- List<UsagePair> usages = new ArrayList<UsagePair>();
-
- for (String src : params) {
- Path srcPath = new Path(src);
- FileSystem srcFs = srcPath.getFileSystem(getConf());
- FileStatus globStatus[] = srcFs.globStatus(srcPath);
- FileStatus statusToPrint[];
-
- if (summary) {
- statusToPrint = globStatus;
- } else {
- Path statPaths[] = FileUtil.stat2Paths(globStatus, srcPath);
- try {
- statusToPrint = srcFs.listStatus(statPaths);
- } catch(FileNotFoundException fnfe) {
- statusToPrint = null;
- }
- }
- if ((statusToPrint == null) || ((statusToPrint.length == 0) &&
- (!srcFs.exists(srcPath)))){
- throw new PathNotFoundException(src);
- }
-
- if (!summary) {
- System.out.println("Found " + statusToPrint.length + " items");
- }
-
- for (FileStatus stat : statusToPrint) {
- long length;
- if (summary || stat.isDirectory()) {
- length = srcFs.getContentSummary(stat.getPath()).getLength();
- } else {
- length = stat.getLen();
- }
-
- usages.add(new UsagePair(String.valueOf(stat.getPath()), length));
- }
- }
- printUsageSummary(usages, humanReadable);
- }
-
- /**
- * Show the summary disk usage of each dir/file
- * that matches the file pattern <i>src</i>
- * @param cmd
- * @param pos ignore anything before this pos in cmd
- * @throws IOException
- * @see org.apache.hadoop.fs.FileSystem#globStatus(Path)
- */
- void dus(String[] cmd, int pos) throws IOException {
- String newcmd[] = new String[cmd.length + 1];
- System.arraycopy(cmd, 0, newcmd, 0, cmd.length);
- newcmd[cmd.length] = "-s";
- du(newcmd, pos);
- }
-
- private void printUsageSummary(List<UsagePair> usages,
- boolean humanReadable) {
- int maxColumnWidth = 0;
- for (UsagePair usage : usages) {
- String toPrint = humanReadable ?
- StringUtils.humanReadableInt(usage.bytes) :
String.valueOf(usage.bytes);
- if (toPrint.length() > maxColumnWidth) {
- maxColumnWidth = toPrint.length();
- }
- }
-
- for (UsagePair usage : usages) {
- String toPrint = humanReadable ?
- StringUtils.humanReadableInt(usage.bytes) :
String.valueOf(usage.bytes);
- System.out.printf("%-"+ (maxColumnWidth + BORDER) +"s", toPrint);
- System.out.println(usage.path);
- }
- }
-
/**
* Move files that match the file pattern <i>srcf</i>
* to a destination file.
@@ -639,8 +515,8 @@ public class FsShell extends Configured
String summary = "hadoop fs is the command to execute fs commands. " +
"The full syntax is: \n\n" +
"hadoop fs [-fs <local | file system URI>] [-conf <configuration
file>]\n\t" +
- "[-D <property=value>] [-df [<path>]] [-du [-s] [-h] <path>]\n\t" +
- "[-dus <path>] [-mv <src> <dst>] [-cp <src> <dst>]\n\t" +
+ "[-D <property=value>]\n\t" +
+ "[-mv <src> <dst>] [-cp <src> <dst>]\n\t" +
"[-put <localsrc> ... <dst>] [-copyFromLocal <localsrc> ... <dst>]\n\t" +
"[-moveFromLocal <localsrc> ... <dst>] [" +
GET_SHORT_USAGE + "\n\t" +
@@ -662,25 +538,6 @@ public class FsShell extends Configured
"\t\tappear first on the command line. Exactly one additional\n" +
"\t\targument must be specified. \n";
- String df = "-df [<path>]: \tShows the capacity, free and used space of
the filesystem.\n"+
- "\t\tIf the filesystem has multiple partitions, and no path to a
particular partition\n"+
- "\t\tis specified, then the status of the root partitions will be
shown.\n";
-
- String du = "-du [-s] [-h] <path>: \tShow the amount of space, in bytes,
used by the files that \n" +
- "\t\tmatch the specified file pattern. The following flags are
optional:\n" +
- "\t\t -s Rather than showing the size of each individual file that\n"
+
- "\t\t matches the pattern, shows the total (summary) size.\n" +
- "\t\t -h Formats the sizes of files in a human-readable fashion\n" +
- "\t\t rather than a number of bytes.\n" +
- "\n" +
- "\t\tNote that, even without the -s option, this only shows size
summaries\n" +
- "\t\tone level deep into a directory.\n" +
- "\t\tThe output is in the form \n" +
- "\t\t\tsize\tname(full path)\n";
-
- String dus = "-dus <path>: \tShow the amount of space, in bytes, used by
the files that \n" +
- "\t\tmatch the specified file pattern. This is equivalent to -du -s
above.\n";
-
String mv = "-mv <src> <dst>: Move files that match the specified file
pattern <src>\n" +
"\t\tto a destination <dst>. When moving multiple files, the \n" +
"\t\tdestination must be a directory. \n";
@@ -720,12 +577,6 @@ public class FsShell extends Configured
System.out.println(conf);
} else if ("D".equals(cmd)) {
System.out.println(D);
- } else if ("df".equals(cmd)) {
- System.out.println(df);
- } else if ("du".equals(cmd)) {
- System.out.println(du);
- } else if ("dus".equals(cmd)) {
- System.out.println(dus);
} else if ("mv".equals(cmd)) {
System.out.println(mv);
} else if ("cp".equals(cmd)) {
@@ -750,14 +601,13 @@ public class FsShell extends Configured
System.out.println(summary);
for (String thisCmdName : commandFactory.getNames()) {
instance = commandFactory.getInstance(thisCmdName);
- System.out.println("\t[" + instance.getUsage() + "]");
+ if (!instance.isDeprecated()) {
+ System.out.println("\t[" + instance.getUsage() + "]");
+ }
}
System.out.println("\t[-help [cmd]]\n");
System.out.println(fs);
- System.out.println(df);
- System.out.println(du);
- System.out.println(dus);
System.out.println(mv);
System.out.println(cp);
System.out.println(put);
@@ -768,9 +618,11 @@ public class FsShell extends Configured
System.out.println(moveToLocal);
for (String thisCmdName : commandFactory.getNames()) {
- printHelp(commandFactory.getInstance(thisCmdName));
+ instance = commandFactory.getInstance(thisCmdName);
+ if (!instance.isDeprecated()) {
+ printHelp(instance);
+ }
}
-
System.out.println(help);
}
}
@@ -792,34 +644,6 @@ public class FsShell extends Configured
}
/**
- * Apply operation specified by 'cmd' on all parameters
- * starting from argv[startindex].
- */
- private int doall(String cmd, String argv[], int startindex) {
- int exitCode = 0;
- int i = startindex;
-
- //
- // for each source file, issue the command
- //
- for (; i < argv.length; i++) {
- try {
- //
- // issue the command to the fs
- //
- if ("-df".equals(cmd)) {
- df(argv[i]);
- }
- } catch (IOException e) {
- LOG.debug("Error", e);
- exitCode = -1;
- displayError(cmd, e);
- }
- }
- return exitCode;
- }
-
- /**
* Displays format of commands.
*
*/
@@ -838,12 +662,6 @@ public class FsShell extends Configured
} else if ("-D".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [-D <[property=value>]");
- } else if ("-du".equals(cmd) || "-dus".equals(cmd)) {
- System.err.println("Usage: java FsShell" +
- " [" + cmd + " <path>]");
- } else if ("-df".equals(cmd) ) {
- System.err.println("Usage: java FsShell" +
- " [" + cmd + " [<path>]]");
} else if ("-mv".equals(cmd) || "-cp".equals(cmd)) {
System.err.println("Usage: java FsShell" +
" [" + cmd + " <src> <dst>]");
@@ -860,9 +678,6 @@ public class FsShell extends Configured
" [" + cmd + " [-crc] <src> <localdst>]");
} else {
System.err.println("Usage: java FsShell");
- System.err.println(" [-df [<path>]]");
- System.err.println(" [-du [-s] [-h] <path>]");
- System.err.println(" [-dus <path>]");
System.err.println(" [-mv <src> <dst>]");
System.err.println(" [-cp <src> <dst>]");
System.err.println(" [-put <localsrc> ... <dst>]");
@@ -872,8 +687,10 @@ public class FsShell extends Configured
System.err.println(" [" + COPYTOLOCAL_SHORT_USAGE + "]");
System.err.println(" [-moveToLocal [-crc] <src> <localdst>]");
for (String name : commandFactory.getNames()) {
- instance = commandFactory.getInstance(name);
- System.err.println(" [" + instance.getUsage() + "]");
+ instance = commandFactory.getInstance(name);
+ if (!instance.isDeprecated()) {
+ System.err.println(" [" + instance.getUsage() + "]");
+ }
}
System.err.println(" [-help [cmd]]");
System.err.println();
@@ -967,16 +784,6 @@ public class FsShell extends Configured
exitCode = rename(argv, getConf());
} else if ("-cp".equals(cmd)) {
exitCode = copy(argv, getConf());
- } else if ("-df".equals(cmd)) {
- if (argv.length-1 > 0) {
- exitCode = doall(cmd, argv, i);
- } else {
- df(null);
- }
- } else if ("-du".equals(cmd)) {
- du(argv, i);
- } else if ("-dus".equals(cmd)) {
- dus(argv, i);
} else if ("-help".equals(cmd)) {
if (i < argv.length) {
printHelp(argv[i]);
@@ -1043,17 +850,4 @@ public class FsShell extends Configured
}
System.exit(res);
}
-
- /**
- * Utility class for a line of du output
- */
- private static class UsagePair {
- public String path;
- public long bytes;
-
- public UsagePair(String path, long bytes) {
- this.path = path;
- this.bytes = bytes;
- }
- }
}
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Command.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Command.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Command.java
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Command.java Tue
May 17 00:53:51 2011
@@ -139,6 +139,10 @@ abstract public class Command extends Co
public int run(String...argv) {
LinkedList<String> args = new LinkedList<String>(Arrays.asList(argv));
try {
+ if (isDeprecated()) {
+ displayWarning(
+ "DEPRECATED: Please use '"+ getReplacementCommand() + "'
instead.");
+ }
processOptions(args);
processArguments(expandArguments(args));
} catch (IOException e) {
@@ -358,7 +362,7 @@ abstract public class Command extends Co
*/
public String getUsage() {
String cmd = "-" + getCommandName();
- String usage = getCommandField("USAGE");
+ String usage = isDeprecated() ? "" : getCommandField("USAGE");
return usage.isEmpty() ? cmd : cmd + " " + usage;
}
@@ -367,7 +371,25 @@ abstract public class Command extends Co
* @return text of the usage
*/
public String getDescription() {
- return getCommandField("DESCRIPTION");
+ return isDeprecated()
+ ? "(DEPRECATED) Same as '" + getReplacementCommand() + "'"
+ : getCommandField("DESCRIPTION");
+ }
+
+ /**
+ * Is the command deprecated?
+ * @return boolean
+ */
+ public final boolean isDeprecated() {
+ return (getReplacementCommand() != null);
+ }
+
+ /**
+ * The replacement for a deprecated command
+ * @return null if not deprecated, else alternative command
+ */
+ public String getReplacementCommand() {
+ return null;
}
/**
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Delete.java Tue May
17 00:53:51 2011
@@ -43,12 +43,13 @@ class Delete extends FsCommand {
/** remove non-directory paths */
public static class Rm extends FsCommand {
public static final String NAME = "rm";
- public static final String USAGE = "[-skipTrash] <src> ...";
+ public static final String USAGE = "[-r|-R] [-skipTrash] <src> ...";
public static final String DESCRIPTION =
"Delete all files that match the specified file pattern.\n" +
"Equivalent to the Unix command \"rm <src>\"\n" +
"-skipTrash option bypasses trash, if enabled, and immediately\n" +
- "deletes <src>";
+ "deletes <src>\n" +
+ " -[rR] Recursively deletes directories";
private boolean skipTrash = false;
private boolean deleteDirs = false;
@@ -56,9 +57,9 @@ class Delete extends FsCommand {
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
CommandFormat cf = new CommandFormat(
- null, 1, Integer.MAX_VALUE, "R", "skipTrash");
+ null, 1, Integer.MAX_VALUE, "r", "R", "skipTrash");
cf.parse(args);
- deleteDirs = cf.getOpt("R");
+ deleteDirs = cf.getOpt("r") || cf.getOpt("R");
skipTrash = cf.getOpt("skipTrash");
}
@@ -95,17 +96,16 @@ class Delete extends FsCommand {
/** remove any path */
static class Rmr extends Rm {
public static final String NAME = "rmr";
- public static final String USAGE = Rm.USAGE;
- public static final String DESCRIPTION =
- "Remove all directories which match the specified file\n" +
- "pattern. Equivalent to the Unix command \"rm -rf <src>\"\n" +
- "-skipTrash option bypasses trash, if enabled, and immediately\n" +
- "deletes <src>";
protected void processOptions(LinkedList<String> args) throws IOException {
- args.addFirst("-R");
+ args.addFirst("-r");
super.processOptions(args);
}
+
+ @Override
+ public String getReplacementCommand() {
+ return "rm -r";
+ }
}
/** empty the trash */
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsCommand.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsCommand.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsCommand.java
(original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsCommand.java Tue
May 17 00:53:51 2011
@@ -48,6 +48,7 @@ abstract public class FsCommand extends
factory.registerCommands(Delete.class);
factory.registerCommands(Display.class);
factory.registerCommands(FsShellPermissions.class);
+ factory.registerCommands(FsUsage.class);
factory.registerCommands(Ls.class);
factory.registerCommands(Mkdir.class);
factory.registerCommands(SetReplication.class);
Added: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsUsage.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsUsage.java?rev=1103968&view=auto
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsUsage.java (added)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/FsUsage.java Tue
May 17 00:53:51 2011
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.shell;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.FsStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.StringUtils;
+
+/** Base class for commands related to viewing filesystem usage, such as
+ * du and df
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+
+class FsUsage extends FsCommand {
+ public static void registerCommands(CommandFactory factory) {
+ factory.addClass(Df.class, "-df");
+ factory.addClass(Du.class, "-du");
+ factory.addClass(Dus.class, "-dus");
+ }
+
+ protected boolean humanReadable = false;
+ protected TableBuilder usagesTable;
+
+ protected String formatSize(long size) {
+ return humanReadable
+ ? StringUtils.humanReadableInt(size)
+ : String.valueOf(size);
+ }
+
+ /** Show the size of a partition in the filesystem */
+ public static class Df extends FsUsage {
+ public static final String NAME = "df";
+ public static final String USAGE = "[<path> ...]";
+ public static final String DESCRIPTION =
+ "Shows the capacity, free and used space of the filesystem.\n"+
+ "If the filesystem has multiple partitions, and no path to a\n" +
+ "particular partition is specified, then the status of the root\n" +
+ "partitions will be shown.";
+
+ @Override
+ protected void processOptions(LinkedList<String> args)
+ throws IOException {
+ CommandFormat cf = new CommandFormat(null, 0, Integer.MAX_VALUE, "h");
+ cf.parse(args);
+ humanReadable = cf.getOpt("h");
+ if (args.isEmpty()) args.add(Path.SEPARATOR);
+ }
+
+ @Override
+ protected void processArguments(LinkedList<PathData> args)
+ throws IOException {
+ usagesTable = new TableBuilder(
+ "Filesystem", "Size", "Used", "Available", "Use%");
+ usagesTable.setRightAlign(1, 2, 3, 4);
+
+ super.processArguments(args);
+ if (!usagesTable.isEmpty()) {
+ usagesTable.printToStream(out);
+ }
+ }
+
+ @Override
+ protected void processPath(PathData item) throws IOException {
+ FsStatus fsStats = item.fs.getStatus(item.path);
+ long size = fsStats.getCapacity();
+ long used = fsStats.getUsed();
+ long free = fsStats.getRemaining();
+
+ usagesTable.addRow(
+ item.fs.getUri(),
+ formatSize(size),
+ formatSize(used),
+ formatSize(free),
+ StringUtils.formatPercent((double)used/(double)size, 0)
+ );
+ }
+ }
+
+ /** show disk usage */
+ public static class Du extends FsUsage {
+ public static final String NAME = "du";
+ public static final String USAGE = "[-s] [-h] <path> ...";
+ public static final String DESCRIPTION =
+ "Show the amount of space, in bytes, used by the files that\n" +
+ "match the specified file pattern. The following flags are optional:\n" +
+ " -s Rather than showing the size of each individual file that\n" +
+ " matches the pattern, shows the total (summary) size.\n" +
+ " -h Formats the sizes of files in a human-readable fashion\n" +
+ " rather than a number of bytes.\n\n" +
+ "Note that, even without the -s option, this only shows size summaries\n" +
+ "one level deep into a directory.\n" +
+ "The output is in the form \n" +
+ "\tsize\tname(full path)\n";
+
+ protected boolean summary = false;
+
+ @Override
+ protected void processOptions(LinkedList<String> args) throws IOException {
+ CommandFormat cf = new CommandFormat(null, 0, Integer.MAX_VALUE, "h",
"s");
+ cf.parse(args);
+ humanReadable = cf.getOpt("h");
+ summary = cf.getOpt("s");
+ if (args.isEmpty()) args.add(Path.CUR_DIR);
+ }
+
+ @Override
+ protected void processPathArgument(PathData item) throws IOException {
+ usagesTable = new TableBuilder(2);
+ // go one level deep on dirs from cmdline unless in summary mode
+ if (!summary && item.stat.isDirectory()) {
+ recursePath(item);
+ } else {
+ super.processPathArgument(item);
+ }
+ usagesTable.printToStream(out);
+ }
+
+ @Override
+ protected void processPath(PathData item) throws IOException {
+ long length;
+ if (item.stat.isDirectory()) {
+ length = item.fs.getContentSummary(item.path).getLength();
+ } else {
+ length = item.stat.getLen();
+ }
+ usagesTable.addRow(formatSize(length), item);
+ }
+ }
+
+ /** show disk usage summary */
+ public static class Dus extends Du {
+ public static final String NAME = "dus";
+
+ @Override
+ protected void processOptions(LinkedList<String> args) throws IOException {
+ args.addFirst("-s");
+ super.processOptions(args);
+ }
+
+ @Override
+ public String getReplacementCommand() {
+ return "du -s";
+ }
+ }
+
+ /**
+ * Creates a table of aligned values based on the maximum width of each
+ * column as a string
+ */
+ private static class TableBuilder {
+ protected boolean hasHeader = false;
+ protected List<String[]> rows;
+ protected int[] widths;
+ protected boolean[] rightAlign;
+
+ /**
+ * Create a table w/o headers
+ * @param columns number of columns
+ */
+ public TableBuilder(int columns) {
+ rows = new ArrayList<String[]>();
+ widths = new int[columns];
+ rightAlign = new boolean[columns];
+ }
+
+ /**
+ * Create a table with headers
+ * @param headers list of headers
+ */
+ public TableBuilder(Object ... headers) {
+ this(headers.length);
+ this.addRow(headers);
+ hasHeader = true;
+ }
+
+ /**
+ * Change the default left-align of columns
+ * @param indexes of columns to right align
+ */
+ public void setRightAlign(int ... indexes) {
+ for (int i : indexes) rightAlign[i] = true;
+ }
+
+ /**
+ * Add a row of objects to the table
+ * @param objects the values
+ */
+ public void addRow(Object ... objects) {
+ String[] row = new String[widths.length];
+ for (int col=0; col < widths.length; col++) {
+ row[col] = String.valueOf(objects[col]);
+ widths[col] = Math.max(widths[col], row[col].length());
+ }
+ rows.add(row);
+ }
+
+ /**
+ * Render the table to a stream
+ * @param out PrintStream for output
+ */
+ public void printToStream(PrintStream out) {
+ if (isEmpty()) return;
+
+ StringBuilder fmt = new StringBuilder();
+ for (int i=0; i < widths.length; i++) {
+ if (fmt.length() != 0) fmt.append(" ");
+ if (rightAlign[i]) {
+ fmt.append("%"+widths[i]+"s");
+ } else if (i != widths.length-1) {
+ fmt.append("%-"+widths[i]+"s");
+ } else {
+ // prevent trailing spaces if the final column is left-aligned
+ fmt.append("%s");
+ }
+ }
+
+ for (Object[] row : rows) {
+ out.println(String.format(fmt.toString(), row));
+ }
+ }
+
+ /**
+ * Number of rows excluding header
+ * @return rows
+ */
+ public int size() {
+ return rows.size() - (hasHeader ? 1 : 0);
+ }
+
+ /**
+ * Does table have any rows
+ * @return boolean
+ */
+ public boolean isEmpty() {
+ return size() == 0;
+ }
+ }
+}
\ No newline at end of file
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Ls.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Ls.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Ls.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Ls.java Tue May 17
00:53:51 2011
@@ -41,7 +41,7 @@ class Ls extends FsCommand {
}
public static final String NAME = "ls";
- public static final String USAGE = "[<path> ...]";
+ public static final String USAGE = "[-R] [<path> ...]";
public static final String DESCRIPTION =
"List the contents that match the specified file pattern. If\n" +
"path is not specified, the contents of /user/<currentUser>\n" +
@@ -50,7 +50,8 @@ class Ls extends FsCommand {
"and file entries are of the form \n" +
"\tfileName(full path) <r n> size \n" +
"where n is the number of replicas specified for the file \n" +
- "and size is the size of the file, in bytes.";
+ "and size is the size of the file, in bytes.\n" +
+ " -R Recursively list the contents of directories";
protected static final SimpleDateFormat dateFormat =
new SimpleDateFormat("yyyy-MM-dd HH:mm");
@@ -135,12 +136,6 @@ class Ls extends FsCommand {
*/
public static class Lsr extends Ls {
public static final String NAME = "lsr";
- public static final String USAGE = Ls.USAGE;
- public static final String DESCRIPTION =
- "Recursively list the contents that match the specified\n" +
- "file pattern. Behaves very similarly to hadoop fs -ls,\n" +
- "except that the data is shown for all the entries in the\n" +
- "subtree.";
@Override
protected void processOptions(LinkedList<String> args)
@@ -148,5 +143,10 @@ class Ls extends FsCommand {
args.addFirst("-R");
super.processOptions(args);
}
+
+ @Override
+ public String getReplacementCommand() {
+ return "ls -R";
+ }
}
}
Modified: hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Stat.java
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Stat.java?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Stat.java (original)
+++ hadoop/common/trunk/src/java/org/apache/hadoop/fs/shell/Stat.java Tue May
17 00:53:51 2011
@@ -93,7 +93,7 @@ class Stat extends FsCommand {
: (stat.isFile() ? "regular file" : "symlink"));
break;
case 'n':
- buf.append(item);
+ buf.append(item.path.getName());
break;
case 'o':
buf.append(stat.getBlockSize());
Modified: hadoop/common/trunk/src/test/core/org/apache/hadoop/cli/testConf.xml
URL:
http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/core/org/apache/hadoop/cli/testConf.xml?rev=1103968&r1=1103967&r2=1103968&view=diff
==============================================================================
--- hadoop/common/trunk/src/test/core/org/apache/hadoop/cli/testConf.xml
(original)
+++ hadoop/common/trunk/src/test/core/org/apache/hadoop/cli/testConf.xml Tue
May 17 00:53:51 2011
@@ -54,7 +54,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-ls \[<path> \.\.\.\]:( |\t)*List the
contents that match the specified file pattern. If( )*</expected-output>
+ <expected-output>^-ls \[-R\] \[<path> \.\.\.\]:( |\t)*List the
contents that match the specified file pattern. If( )*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -101,19 +101,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-lsr \[<path> \.\.\.\]:( |\t)*Recursively
list the contents that match the specified( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*file pattern.( |\t)*Behaves very similarly
to hadoop fs -ls,( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*except that the data is shown for all the
entries in the( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*subtree.( )*</expected-output>
+ <expected-output>^-lsr:\s+\(DEPRECATED\) Same as 'ls
-R'</expected-output>
</comparator>
</comparators>
</test>
@@ -152,7 +140,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-du \[-s\] \[-h\] <path>:\s+Show the amount
of space, in bytes, used by the files that\s*</expected-output>
+ <expected-output>^-du \[-s\] \[-h\] <path> \.\.\.:\s+Show the
amount of space, in bytes, used by the files that\s*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -203,11 +191,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-dus <path>:( |\t)*Show the amount of space,
in bytes, used by the files that( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*match the specified file pattern. This is
equivalent to -du -s above.</expected-output>
+ <expected-output>^-dus:\s+\(DEPRECATED\) Same as 'du
-s'</expected-output>
</comparator>
</comparators>
</test>
@@ -299,7 +283,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-rm \[-skipTrash\] <src> \.\.\.:(
|\t)*Delete all files that match the specified file pattern.(
)*</expected-output>
+ <expected-output>^-rm \[-r\|-R\] \[-skipTrash\] <src> \.\.\.:(
|\t)*Delete all files that match the specified file pattern.(
)*</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
@@ -326,19 +310,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^-rmr \[-skipTrash\] <src> \.\.\.:(
|\t)*Remove all directories which match the specified file( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*pattern. Equivalent to the Unix command "rm
-rf <src>"( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*-skipTrash option bypasses trash, if
enabled, and immediately( )*</expected-output>
- </comparator>
- <comparator>
- <type>RegexpComparator</type>
- <expected-output>^( |\t)*deletes <src>( )*</expected-output>
+ <expected-output>^-rmr:\s+\(DEPRECATED\) Same as 'rm
-r'</expected-output>
</comparator>
</comparators>
</test>