This is an automated email from the ASF dual-hosted git repository.
weichiu pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 6f5a36c HADOOP-13656. fs -expunge to take a filesystem. Contributed by Shweta.
6f5a36c is described below
commit 6f5a36c13c4d2eb24e207aa99a9781c0be11660b
Author: Shweta Yakkali <[email protected]>
AuthorDate: Thu May 30 13:20:44 2019 -0700
HADOOP-13656. fs -expunge to take a filesystem. Contributed by Shweta.
Signed-off-by: Wei-Chiu Chuang <[email protected]>
---
.../main/java/org/apache/hadoop/fs/FileSystem.java | 6 +
.../main/java/org/apache/hadoop/fs/FsShell.java | 4 +-
.../java/org/apache/hadoop/fs/shell/Delete.java | 16 +-
.../src/site/markdown/FileSystemShell.md | 11 +-
.../test/java/org/apache/hadoop/fs/TestTrash.java | 326 ++++++++++++---------
.../apache/hadoop/fs/viewfs/TestViewFsTrash.java | 2 +-
.../org/apache/hadoop/hdfs/tools/DFSAdmin.java | 13 +-
.../java/org/apache/hadoop/hdfs/TestDFSShell.java | 3 -
.../java/org/apache/hadoop/hdfs/TestHDFSTrash.java | 2 +-
.../hdfs/server/namenode/TestCheckpoint.java | 2 -
10 files changed, 215 insertions(+), 170 deletions(-)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 19f38af..4580451 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -206,6 +206,12 @@ public abstract class FileSystem extends Configured
CACHE.map.put(new Cache.Key(uri, conf), fs);
}
+ @VisibleForTesting
+ static void removeFileSystemForTesting(URI uri, Configuration conf,
+ FileSystem fs) throws IOException {
+ CACHE.map.remove(new Cache.Key(uri, conf), fs);
+ }
+
/**
* Get a FileSystem instance based on the uri, the passed in
* configuration and the user.
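The new removeFileSystemForTesting() mirrors the existing addFileSystemForTesting() shown above. A minimal sketch (not part of the patch; the helper class name is illustrative) of how a test can pair the two so a seeded cache entry is always undone. Both methods are package-private, so such a helper has to live in org.apache.hadoop.fs, as TestTrash below does:

```java
// Sketch only: pair addFileSystemForTesting() with removeFileSystemForTesting().
package org.apache.hadoop.fs;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;

class FsCacheSeedingSketch {
  /** Seed the FileSystem cache with fs, run body, then undo the seeding. */
  static void withSeededFileSystem(URI uri, Configuration conf, FileSystem fs,
      Runnable body) throws IOException {
    FileSystem.addFileSystemForTesting(uri, conf, fs);
    try {
      body.run(); // e.g. code that resolves the URI via FileSystem.get()
    } finally {
      FileSystem.removeFileSystemForTesting(uri, conf, fs);
    }
  }
}
```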
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 5be6e5f..680e742 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -97,7 +97,7 @@ public class FsShell extends Configured implements Tool {
return this.help;
}
- protected void init() throws IOException {
+ protected void init() {
getConf().setQuietMode(true);
UserGroupInformation.setConfiguration(getConf());
if (commandFactory == null) {
@@ -298,7 +298,7 @@ public class FsShell extends Configured implements Tool {
* run
*/
@Override
- public int run(String argv[]) throws Exception {
+ public int run(String[] argv) {
// initialize FsShell
init();
Tracer tracer = new Tracer.Builder("FsShell").
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
index 57b543a..3c9368c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Delete.java
@@ -25,6 +25,7 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.PathIOException;
@@ -218,26 +219,35 @@ class Delete {
// delete files from the trash that are older
// than the retention threshold.
static class Expunge extends FsCommand {
+ private static final String OPTION_FILESYSTEM = "fs";
+
public static final String NAME = "expunge";
public static final String USAGE =
- "[-immediate]";
+ "[-immediate] [-" + OPTION_FILESYSTEM + " <path>]";
public static final String DESCRIPTION =
"Delete files from the trash that are older " +
"than the retention threshold";
private boolean emptyImmediately = false;
+ private String fsArgument;
- // TODO: should probably allow path arguments for the filesystems
@Override
protected void processOptions(LinkedList<String> args) throws IOException {
- CommandFormat cf = new CommandFormat(0, 1, "immediate");
+ CommandFormat cf = new CommandFormat(0, 2, "immediate");
+ cf.addOptionWithValue(OPTION_FILESYSTEM);
cf.parse(args);
emptyImmediately = cf.getOpt("immediate");
+ fsArgument = cf.getOptValue(OPTION_FILESYSTEM);
}
@Override
protected void processArguments(LinkedList<PathData> args)
throws IOException {
+ if (fsArgument != null && fsArgument.length() != 0) {
+ getConf().set(
+ CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY, fsArgument);
+ }
+
FileSystem[] childFileSystems =
FileSystem.get(getConf()).getChildFileSystems();
if (null != childFileSystems) {
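The option handling above leans on CommandFormat's value-carrying options. A minimal sketch (not part of the patch; the class name and argument list are illustrative) of the parsing behaviour the new Expunge code relies on:

```java
// Sketch only: CommandFormat option parsing as used by the new -fs option.
import java.util.LinkedList;

import org.apache.hadoop.fs.shell.CommandFormat;

public class ExpungeOptionParsingSketch {
  public static void main(String[] ignored) {
    LinkedList<String> args = new LinkedList<>();
    args.add("-immediate");
    args.add("-fs");
    args.add("s3a://bucket/");

    CommandFormat cf = new CommandFormat(0, 2, "immediate");
    cf.addOptionWithValue("fs"); // "-fs" consumes the following token
    cf.parse(args);              // removes both options from args

    System.out.println(cf.getOpt("immediate"));  // true
    System.out.println(cf.getOptValue("fs"));    // s3a://bucket/
    System.out.println(args.size());             // 0 positional args left
  }
}
```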
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
index f050e30..44927f2 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/FileSystemShell.md
@@ -268,7 +268,7 @@ Displays a summary of file lengths.
expunge
-------
-Usage: `hadoop fs -expunge [-immediate]`
+Usage: `hadoop fs -expunge [-immediate] [-fs <path>]`
Permanently delete files in checkpoints older than the retention threshold
from trash directory, and create new checkpoint.
@@ -286,6 +286,15 @@ This value should be smaller or equal to
`fs.trash.interval`.
If the `-immediate` option is passed, all files in the trash for the current
user are immediately deleted, ignoring the `fs.trash.interval` setting.
+If the `-fs` option is passed, the supplied filesystem is expunged instead of
+the default filesystem, and a checkpoint is created.
+
+For example:
+
+```
hadoop fs -expunge -immediate -fs s3a://landsat-pds/
+```
+
Refer to the
[HDFS Architecture guide](../hadoop-hdfs/HdfsDesign.html#File_Deletes_and_Undeletes)
for more information about trash feature of HDFS.
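The same invocation can also be driven programmatically; a minimal sketch (not from the patch) using FsShell through ToolRunner, with a placeholder NameNode URI:

```java
// Sketch only: runs the documented expunge command via the FsShell API.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;

public class ExpungeViaShellSketch {
  public static void main(String[] args) throws Exception {
    int rc = ToolRunner.run(new Configuration(), new FsShell(),
        new String[] {"-expunge", "-immediate", "-fs", "hdfs://namenode:8020/"});
    System.exit(rc);
  }
}
```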
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
index cf22f3b..e8e0287 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
@@ -38,7 +38,10 @@ import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.TrashPolicyDefault.Emptier;
@@ -112,7 +115,7 @@ public class TestTrash {
* @throws IOException
*/
public static void trashShell(final FileSystem fs, final Path base)
- throws IOException {
+ throws Exception {
Configuration conf = new Configuration();
conf.set("fs.defaultFS", fs.getUri().toString());
trashShell(conf, base, null, null);
@@ -127,8 +130,7 @@ public class TestTrash {
* @throws IOException
*/
public static void trashShell(final Configuration conf, final Path base,
- FileSystem trashRootFs, Path trashRoot)
- throws IOException {
+ FileSystem trashRootFs, Path trashRoot) throws Exception {
FileSystem fs = FileSystem.get(conf);
conf.setLong(FS_TRASH_INTERVAL_KEY, 0); // disabled
@@ -163,13 +165,9 @@ public class TestTrash {
String[] args = new String[1];
args[0] = "-expunge";
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Expunge should return zero", 0, val);
}
// Verify that we succeed in removing the file we created.
@@ -179,15 +177,10 @@ public class TestTrash {
args[0] = "-rm";
args[1] = myFile.toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Remove should return zero", 0, val);
-
checkTrash(trashRootFs, trashRoot, fs.makeQualified(myFile));
}
@@ -200,13 +193,9 @@ public class TestTrash {
args[0] = "-rm";
args[1] = new Path(base, "test/mkdirs/myFile").toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Remove should return zero", 0, val);
}
// Verify that we can recreate the file
@@ -219,13 +208,9 @@ public class TestTrash {
args[0] = "-rmr";
args[1] = new Path(base, "test/mkdirs").toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Recursive Remove should return zero", 0, val);
}
// recreate directory
@@ -237,29 +222,22 @@ public class TestTrash {
args[0] = "-rmr";
args[1] = new Path(base, "test/mkdirs").toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Recursive Remove should return zero", 0, val);
}
// Check that we can delete a file from the trash
{
- Path toErase = new Path(trashRoot, "toErase");
- int retVal = -1;
- writeFile(trashRootFs, toErase, 10);
- try {
- retVal = shell.run(new String[] {"-rm", toErase.toString()});
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(retVal == 0);
- checkNotInTrash (trashRootFs, trashRoot, toErase.toString());
- checkNotInTrash (trashRootFs, trashRoot, toErase.toString()+".1");
+ Path toErase = new Path(trashRoot, "toErase");
+ int val = -1;
+ writeFile(trashRootFs, toErase, 10);
+
+ val = shell.run(new String[] {"-rm", toErase.toString()});
+
+ assertEquals("Recursive Remove should return zero", 0, val);
+ checkNotInTrash(trashRootFs, trashRoot, toErase.toString());
+ checkNotInTrash(trashRootFs, trashRoot, toErase.toString()+".1");
}
// simulate Trash removal
@@ -267,17 +245,14 @@ public class TestTrash {
String[] args = new String[1];
args[0] = "-expunge";
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Expunge should return zero", 0, val);
}
// verify that after expunging the Trash, it really goes away
- checkNotInTrash(trashRootFs, trashRoot, new Path(base, "test/mkdirs/myFile").toString());
+ checkNotInTrash(trashRootFs, trashRoot, new Path(
+ base, "test/mkdirs/myFile").toString());
// recreate directory and file
mkdir(fs, myPath);
@@ -289,26 +264,18 @@ public class TestTrash {
args[0] = "-rm";
args[1] = myFile.toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Remove should return zero", 0, val);
checkTrash(trashRootFs, trashRoot, myFile);
args = new String[2];
args[0] = "-rmr";
args[1] = myPath.toString();
val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val == 0);
+ val = shell.run(args);
+
+ assertEquals("Recursive Remove should return zero", 0, val);
checkTrash(trashRootFs, trashRoot, myPath);
}
@@ -318,13 +285,9 @@ public class TestTrash {
args[0] = "-rmr";
args[1] = trashRoot.getParent().getParent().toString();
int val = -1;
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertEquals("exit code", 1, val);
+ val = shell.run(args);
+
+ assertEquals("Recursive Remove should return exit code 1", 1, val);
assertTrue(trashRootFs.exists(trashRoot));
}
@@ -341,23 +304,18 @@ public class TestTrash {
args[1] = "-skipTrash";
args[2] = myFile.toString();
int val = -1;
- try {
- // Clear out trash
- assertEquals("-expunge failed",
- 0, shell.run(new String [] { "-expunge" } ));
-
- val = shell.run(args);
-
- }catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
+ // Clear out trash
+ assertEquals("-expunge failed",
+ 0, shell.run(new String[] {"-expunge" }));
+
+ val = shell.run(args);
+
assertFalse("Expected TrashRoot (" + trashRoot +
") to exist in file system:"
+ trashRootFs.getUri(),
trashRootFs.exists(trashRoot)); // No new Current should be created
assertFalse(fs.exists(myFile));
- assertTrue(val == 0);
+ assertEquals("Remove with skipTrash should return zero", 0, val);
}
// recreate directory and file
@@ -372,64 +330,52 @@ public class TestTrash {
args[2] = myPath.toString();
int val = -1;
- try {
- // Clear out trash
- assertEquals(0, shell.run(new String [] { "-expunge" } ));
-
- val = shell.run(args);
+ // Clear out trash
+ assertEquals(0, shell.run(new String[] {"-expunge" }));
- }catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
+ val = shell.run(args);
assertFalse(trashRootFs.exists(trashRoot)); // No new Current should be created
assertFalse(fs.exists(myPath));
assertFalse(fs.exists(myFile));
- assertTrue(val == 0);
+ assertEquals("Remove with skipTrash should return zero", 0, val);
}
// deleting same file multiple times
{
int val = -1;
mkdir(fs, myPath);
-
- try {
- assertEquals(0, shell.run(new String [] { "-expunge" } ));
- } catch (Exception e) {
- System.err.println("Exception raised from fs expunge " +
- e.getLocalizedMessage());
- }
+ assertEquals("Expunge should return zero",
+ 0, shell.run(new String[] {"-expunge" }));
+
// create a file in that directory.
myFile = new Path(base, "test/mkdirs/myFile");
- String [] args = new String[] {"-rm", myFile.toString()};
+ String[] args = new String[] {"-rm", myFile.toString()};
int num_runs = 10;
- for(int i=0;i<num_runs; i++) {
+ for(int i=0; i<num_runs; i++) {
//create file
writeFile(fs, myFile, 10);
// delete file
- try {
- val = shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
- assertTrue(val==0);
+ val = shell.run(args);
+
+ assertEquals("Remove should return zero", 0, val);
}
// current trash directory
Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()),
- new Path(myFile.getParent().toUri().getPath()));
+ new Path(myFile.getParent().toUri().getPath()));
- System.out.println("Deleting same myFile: myFile.parent=" +
myFile.getParent().toUri().getPath() +
+ System.out.println("Deleting same myFile: myFile.parent="
+ + myFile.getParent().toUri().getPath() +
"; trashroot="+trashRoot.toUri().getPath() +
"; trashDir=" + trashDir.toUri().getPath());
int count = countSameDeletedFiles(fs, trashDir, myFile);
- System.out.println("counted " + count + " files " + myFile.getName() +
"* in " + trashDir);
- assertTrue(count==num_runs);
+ System.out.println("counted " + count + " files "
+ + myFile.getName() + "* in " + trashDir);
+ assertEquals("Count should have returned 10", num_runs, count);
}
//Verify skipTrash option is suggested when rm fails due to its absence
@@ -443,19 +389,17 @@ public class TestTrash {
PrintStream newOut = new PrintStream(byteStream);
System.setOut(newOut);
System.setErr(newOut);
- try {
- shell.run(args);
- } catch (Exception e) {
- System.err.println("Exception raised from Trash.run " +
- e.getLocalizedMessage());
- }
+
+ shell.run(args);
+
String output = byteStream.toString();
System.setOut(stdout);
System.setErr(stderr);
assertTrue("skipTrash wasn't suggested as remedy to failed rm command" +
" or we deleted / even though we could not get server defaults",
output.indexOf("Consider using -skipTrash option") != -1 ||
- output.indexOf("Failed to determine server trash configuration") !=
-1);
+ output.indexOf("Failed to determine server "
+ + "trash configuration") != -1);
}
// Verify old checkpoint format is recognized
@@ -475,13 +419,9 @@ public class TestTrash {
// Clear out trash
int rc = -1;
- try {
- rc = shell.run(new String [] { "-expunge" } );
- } catch (Exception e) {
- System.err.println("Exception raised from fs expunge " +
- e.getLocalizedMessage());
- }
- assertEquals(0, rc);
+ rc = shell.run(new String[] {"-expunge" });
+
+ assertEquals("Expunge should return zero", 0, rc);
assertFalse("old checkpoint format not recognized",
trashRootFs.exists(dirToDelete));
assertTrue("old checkpoint format directory should not be removed",
@@ -507,12 +447,8 @@ public class TestTrash {
// Clear out trash
int rc = -1;
- try {
- rc = shell.run(new String[] {"-expunge", "-immediate"});
- } catch (Exception e) {
- fail("Unexpected exception running the trash shell: " +
- e.getLocalizedMessage());
- }
+ rc = shell.run(new String[] {"-expunge", "-immediate"});
+
assertEquals("Expunge immediate should return zero", 0, rc);
assertFalse("Old checkpoint should be removed",
trashRootFs.exists(oldCheckpoint));
@@ -520,8 +456,87 @@ public class TestTrash {
trashRootFs.exists(recentCheckpoint));
assertFalse("Current folder should be removed",
trashRootFs.exists(currentFolder));
- assertEquals("Ensure trash folder is empty",
- trashRootFs.listStatus(trashRoot.getParent()).length, 0);
+ assertEquals("Ensure trash folder is empty", 0,
+ trashRootFs.listStatus(trashRoot.getParent()).length);
+ }
+ }
+
+ @Test
+ public void testExpungeWithFileSystem() throws Exception {
+ {
+ Configuration config = new Configuration();
+
+ config.setClass("fs.testlfs.impl", TestLFS.class, FileSystem.class);
+ TestLFS testlfs = new TestLFS();
+ testlfs.setUri("testlfs:/");
+ URI testlfsURI = testlfs.getUri();
+ config.set("fs.defaultFS", testlfsURI.toString());
+
+ config.setLong(FS_TRASH_INTERVAL_KEY, 10); // 10 minutes
+ assertTrue(new Trash(config).isEnabled());
+
+
+ FileSystem.addFileSystemForTesting(testlfsURI, config, testlfs);
+ testlfs.initialize(testlfsURI, config);
+
+ FsShell testlfsshell = new FsShell();
+ testlfsshell.setConf(config);
+
+ Path trashRoot = testlfsshell.getCurrentTrashDir();
+
+ long trashInterval = config.getLong(FS_TRASH_INTERVAL_KEY,
+ FS_TRASH_INTERVAL_DEFAULT);
+ long now = Time.now();
+ DateFormat checkpointFormat = new SimpleDateFormat("yyMMddHHmm");
+ Path oldCheckpoint = new Path(trashRoot.getParent(),
+ checkpointFormat.format(now - (trashInterval * 60 * 1000) - 1));
+ Path recentCheckpoint = new Path(trashRoot.getParent(),
+ checkpointFormat.format(now));
+ Path currentFolder = new Path(trashRoot.getParent(), "Current");
+
+ Path myPath = new Path(TEST_DIR, "test/mkdirs");
+
+ Path myFile = new Path(TEST_DIR, "test/mkdirs/testFile");
+
+ mkdir(testlfs, oldCheckpoint);
+ mkdir(testlfs, recentCheckpoint);
+ mkdir(testlfs, currentFolder);
+ mkdir(testlfs, myPath);
+ writeFile(testlfs, myFile, 10);
+
+ String[] args = new String[]{"-expunge", "-immediate",
+ "-fs", "testlfs:/"};
+ int val = testlfsshell.run(args);
+
+ assertEquals("Expunge immediate with filesystem should return zero",
+ 0, val);
+ assertFalse("Old checkpoint should be removed",
+ testlfs.exists(oldCheckpoint));
+ assertFalse("Recent checkpoint should be removed",
+ testlfs.exists(recentCheckpoint));
+ assertFalse("Current folder should be removed",
+ testlfs.exists(currentFolder));
+ assertEquals("Ensure trash folder is empty", 0,
+ testlfs.listStatus(trashRoot.getParent()).length);
+
+ // Incorrect FileSystem scheme
+ String incorrectFS = "incorrectfs:/";
+ args = new String[]{"-expunge", "-immediate",
+ "-fs", incorrectFS};
+ val = testlfsshell.run(args);
+
+ assertEquals("Expunge immediate should return exit code 1 when "
+ + "incorrect Filesystem is passed",
+ 1, val);
+
+ // Empty FileSystem scheme
+ args = new String[]{"-expunge", "-immediate",
+ "-fs", ""};
+ val = testlfsshell.run(args);
+
+ assertNotEquals("Expunge immediate should fail when filesystem is NULL",
+ 0, val);
+ FileSystem.removeFileSystemForTesting(testlfsURI, config, testlfs);
}
}
@@ -551,7 +566,7 @@ public class TestTrash {
}
@Test
- public void testTrash() throws IOException {
+ public void testTrash() throws Exception {
Configuration conf = new Configuration();
conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
trashShell(FileSystem.getLocal(conf), TEST_DIR);
@@ -779,6 +794,7 @@ public class TestTrash {
}
static class TestLFS extends LocalFileSystem {
+ private URI uriName = null;
Path home;
TestLFS() {
this(TEST_DIR);
@@ -801,6 +817,24 @@ public class TestTrash {
public Path getHomeDirectory() {
return home;
}
+
+ @Override
+ public URI getUri() {
+ if(uriName == null){
+ return super.getUri();
+ } else {
+ return uriName;
+ }
+ }
+
+ @Override
+ public String getScheme() {
+ return "testlfs";
+ }
+
+ public void setUri(String uri){
+ uriName = URI.create(uri);
+ }
}
/**
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
index 62ef9d1..94c3262 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestViewFsTrash.java
@@ -70,7 +70,7 @@ public class TestViewFsTrash {
}
@Test
- public void testTrash() throws IOException {
+ public void testTrash() throws Exception {
TestTrash.trashShell(conf, fileSystemTestHelper.getTestRootPath(fsView),
fsTarget, new Path(fsTarget.getHomeDirectory(), ".Trash/Current"));
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index d1f8362..78c02a6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -2194,7 +2194,7 @@ public class DFSAdmin extends FsShell {
* @return 0 on success, non zero on error.
*/
@Override
- public int run(String[] argv) throws Exception {
+ public int run(String[] argv) {
if (argv.length < 1) {
printUsage("");
@@ -2346,16 +2346,7 @@ public class DFSAdmin extends FsShell {
}
// initialize DFSAdmin
- try {
- init();
- } catch (RPC.VersionMismatch v) {
- System.err.println("Version Mismatch between client and server"
- + "... command aborted.");
- return exitCode;
- } catch (IOException e) {
- System.err.println("Bad connection to DFS... command aborted.");
- return exitCode;
- }
+ init();
Exception debugException = null;
exitCode = 0;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
index 76179bc..6503251 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java
@@ -1302,9 +1302,6 @@ public class TestDFSShell {
exitCode = shell.run(args);
LOG.info("RUN: "+args[0]+" exit=" + exitCode);
return exitCode;
- } catch (IOException e) {
- LOG.error("RUN: "+args[0]+" IOException="+e.getMessage());
- throw e;
} catch (RuntimeException e) {
LOG.error("RUN: "+args[0]+" RuntimeException="+e.getMessage());
throw e;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
index fb28726..5dbb124 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSTrash.java
@@ -98,7 +98,7 @@ public class TestHDFSTrash {
}
@Test
- public void testTrash() throws IOException {
+ public void testTrash() throws Exception {
TestTrash.trashShell(cluster.getFileSystem(), new Path("/"));
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
index 8aa5347..4618f4b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
@@ -1155,8 +1155,6 @@ public class TestCheckpoint {
String[] args = new String[]{"-saveNamespace"};
try {
admin.run(args);
- } catch(IOException eIO) {
- assertTrue(eIO.getLocalizedMessage().contains("Safe mode should be turned ON"));
} catch(Exception e) {
throw new IOException(e);
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]