This is an automated email from the ASF dual-hosted git repository.
ayushsaxena pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 4c53fb9 HDFS-15338. listOpenFiles() should throw InvalidPathException
in case of invalid paths. Contributed by Jinglun.
4c53fb9 is described below
commit 4c53fb9ce102c46c6956b4aecdfd9dd513280b35
Author: Ayush Saxena <[email protected]>
AuthorDate: Mon May 11 16:48:34 2020 +0530
HDFS-15338. listOpenFiles() should throw InvalidPathException in case of
invalid paths. Contributed by Jinglun.
---
.../apache/hadoop/hdfs/DistributedFileSystem.java | 3 ++-
.../hadoop/hdfs/server/namenode/FSNamesystem.java | 1 +
.../apache/hadoop/hdfs/server/namenode/INode.java | 2 +-
.../hdfs/server/namenode/TestListOpenFiles.java | 26 ++++++++++++++++++++++
4 files changed, 30 insertions(+), 2 deletions(-)
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index a6d4758..b4a932e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -3556,7 +3556,8 @@ public class DistributedFileSystem extends FileSystem
public RemoteIterator<OpenFileEntry> listOpenFiles(
EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException {
- return dfs.listOpenFiles(openFilesTypes, path);
+ Path absF = fixRelativePart(new Path(path));
+ return dfs.listOpenFiles(openFilesTypes, getPathName(absF));
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 2334e7b..5e50b58 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -1922,6 +1922,7 @@ public class FSNamesystem implements Namesystem, FSNamesystemMBean,
*/
BatchedListEntries<OpenFileEntry> listOpenFiles(long prevId,
EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException {
+ INode.checkAbsolutePath(path);
final String operationName = "listOpenFiles";
checkSuperuserPrivilege();
checkOperation(OperationCategory.READ);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index a9f2035..6545777 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -828,7 +828,7 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
return path != null && path.startsWith(Path.SEPARATOR);
}
- private static void checkAbsolutePath(final String path) {
+ static void checkAbsolutePath(final String path) {
if (!isValidAbsolutePath(path)) {
throw new AssertionError("Absolute path required, but got '"
+ path + "'");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java
index 2158bc7..c6603cf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java
@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.namenode;
+import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
@@ -295,4 +296,29 @@ public class TestListOpenFiles {
verifyOpenFiles(openFiles, OpenFilesIterator.FILTER_PATH_DEFAULT);
}
}
+
+ @Test
+ public void testListOpenFilesWithInvalidPathServerSide() throws Exception {
+ HashMap<Path, FSDataOutputStream> openFiles = new HashMap<>();
+ openFiles.putAll(
+ DFSTestUtil.createOpenFiles(fs, new Path("/base"), "open-1", 1));
+ verifyOpenFiles(openFiles, EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
+ "/base");
+ intercept(AssertionError.class, "Absolute path required",
+ "Expect InvalidPathException", () -> verifyOpenFiles(new HashMap<>(),
+ EnumSet.of(OpenFilesType.ALL_OPEN_FILES), "hdfs://cluster/base"));
+ while(openFiles.size() > 0) {
+ DFSTestUtil.closeOpenFiles(openFiles, 1);
+ verifyOpenFiles(openFiles);
+ }
+ }
+
+ @Test
+ public void testListOpenFilesWithInvalidPathClientSide() throws Exception {
+ intercept(IllegalArgumentException.class, "Wrong FS",
+ "Expect IllegalArgumentException", () -> fs
+ .listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
+ "hdfs://non-cluster/"));
+ fs.listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES), "/path");
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]