Author: cutting Date: Mon Nov 20 16:09:06 2006 New Revision: 477423 URL: http://svn.apache.org/viewvc?view=rev&rev=477423 Log: HADOOP-733. Make exit codes in DFSShell consistent and add a unit test. Contributed by Dhruba.
Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java Modified: lucene/hadoop/trunk/CHANGES.txt lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Modified: lucene/hadoop/trunk/CHANGES.txt URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=477423&r1=477422&r2=477423 ============================================================================== --- lucene/hadoop/trunk/CHANGES.txt (original) +++ lucene/hadoop/trunk/CHANGES.txt Mon Nov 20 16:09:06 2006 @@ -100,6 +100,10 @@ 30. HADOOP-447. In DFS, fix getBlockSize(Path) to work with relative paths. (Raghu Angadi via cutting) +31. HADOOP-733. Make exit codes in DFShell consistent and add a unit + test. (Dhruba Borthakur via cutting) + + Release 0.8.0 - 2006-11-03 1. HADOOP-477. Extend contrib/streaming to scan the PATH environment Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?view=diff&rev=477423&r1=477422&r2=477423 ============================================================================== --- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original) +++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Mon Nov 20 16:09:06 2006 @@ -216,7 +216,7 @@ Path items[] = fs.listPaths(src); if (items == null) { - System.out.println("Could not get listing for " + src); + throw new IOException("Could not get listing for " + src); } else { for (int i = 0; i < items.length; i++) { @@ -310,7 +310,7 @@ public void mkdir(String src) throws IOException { Path f = new Path(src); if (!fs.mkdirs(f)) { - System.out.println("Mkdirs failed to create " + src); + throw new IOException("Mkdirs failed to create " + src); } } @@ -499,15 +499,14 @@ /* delete an DFS file */ private void delete(Path src, boolean recursive ) throws IOException { if (fs.isDirectory(src) && !recursive) { - System.out.println("Cannot remove directory \"" + src + + throw new IOException("Cannot remove directory \"" + src + "\", use -rmr instead"); - return; } if (fs.delete(src)) { System.out.println("Deleted " + src); } else { - System.out.println("Delete failed " + src); + throw new IOException("Delete failed " + src); } } @@ -730,20 +729,20 @@ else copyMergeToLocal(argv[i++], new Path(argv[i++])); } else if ("-cat".equals(cmd)) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else if ("-moveToLocal".equals(cmd)) { moveToLocal(argv[i++], new Path(argv[i++])); } else if ("-setrep".equals(cmd)) { setReplication(argv, i); } else if ("-ls".equals(cmd)) { if (i < argv.length) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else { ls("", false); } } else if ("-lsr".equals(cmd)) { if (i < argv.length) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else { ls("", true); } @@ -752,17 +751,17 @@ } else if ("-cp".equals(cmd)) { exitCode = copy(argv, conf); } else if ("-rm".equals(cmd)) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else if ("-rmr".equals(cmd)) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else if ("-du".equals(cmd)) { if (i < argv.length) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else { du(""); } } else if ("-mkdir".equals(cmd)) { - doall(cmd, argv, conf, i); + exitCode = doall(cmd, argv, conf, i); } else { exitCode = -1; System.err.println(cmd.substring(1) + ": Unknown command"); Added: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java?view=auto&rev=477423 ============================================================================== --- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java (added) +++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShell.java Mon Nov 20 16:09:06 2006 @@ -0,0 +1,128 @@
+/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.dfs; + +import junit.framework.TestCase; +import java.io.*; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + + +/** + * This class tests commands from DFSShell. + * @author Dhruba Borthakur + */ +public class TestDFSShell extends TestCase { + + private void writeFile(FileSystem fileSys, Path name) throws IOException { + DataOutputStream stm = fileSys.create(name); + stm.writeBytes("dhruba"); + stm.close(); + } + + /** + * Tests various options of DFSShell. + */ + public void testDFSShell() throws IOException { + Configuration conf = new Configuration(); + MiniDFSCluster cluster = new MiniDFSCluster(65312, conf, 2, false); + FileSystem fileSys = cluster.getFileSystem(); + DFSShell shell = new DFSShell(); + shell.setConf(conf); + + try { + // First create a new directory with mkdirs + Path myPath = new Path("/test/mkdirs"); + assertTrue(fileSys.mkdirs(myPath)); + assertTrue(fileSys.exists(myPath)); + assertTrue(fileSys.mkdirs(myPath)); + + // Second, create a file in that directory. 
+ Path myFile = new Path("/test/mkdirs/myFile"); + writeFile(fileSys, myFile); + + // Verify that we can read the file + { + String[] args = new String[2]; + args[0] = "-cat"; + args[1] = "/test/mkdirs/myFile"; + int val = -1; + try { + val = shell.run(args); + } catch (Exception e) { + System.err.println("Exception raised from DFSShell.run " + + e.getLocalizedMessage()); + } + assertTrue(val == 0); + } + + // Verify that we get an error while trying to read an nonexistent file + { + String[] args = new String[2]; + args[0] = "-cat"; + args[1] = "/test/mkdirs/myFile1"; + int val = -1; + try { + val = shell.run(args); + } catch (Exception e) { + System.err.println("Exception raised from DFSShell.run " + + e.getLocalizedMessage()); + } + assertTrue(val != 0); + } + + // Verify that we get an error while trying to delete an nonexistent file + { + String[] args = new String[2]; + args[0] = "-rm"; + args[1] = "/test/mkdirs/myFile1"; + int val = -1; + try { + val = shell.run(args); + } catch (Exception e) { + System.err.println("Exception raised from DFSShell.run " + + e.getLocalizedMessage()); + } + assertTrue(val != 0); + } + + // Verify that we succeed in removing the file we created + { + String[] args = new String[2]; + args[0] = "-rm"; + args[1] = "/test/mkdirs/myFile"; + int val = -1; + try { + val = shell.run(args); + } catch (Exception e) { + System.err.println("Exception raised from DFSShell.run " + + e.getLocalizedMessage()); + } + assertTrue(val == 0); + } + + } finally { + try { + fileSys.close(); + } catch (Exception e) { + } + cluster.shutdown(); + } + } +}