This is an automated email from the ASF dual-hosted git repository.
tasanuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git
The following commit(s) were added to refs/heads/trunk by this push:
new 736659e  HDFS-15252. HttpFS: setWorkingDirectory should not accept invalid paths. Contributed by hemanthboyina.
736659e is described below
commit 736659e0e1ab2882313e3a41d9a20d4b0f5b0816
Author: Takanobu Asanuma <[email protected]>
AuthorDate: Thu Apr 2 19:21:02 2020 +0900
HDFS-15252. HttpFS: setWorkingDirectory should not accept invalid paths. Contributed by hemanthboyina.
---
.../org/apache/hadoop/fs/http/client/HttpFSFileSystem.java | 6 ++++++
.../org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java | 12 +++++++++++-
2 files changed, 17 insertions(+), 1 deletion(-)
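[Editor's note] The sketch below is a minimal, hypothetical client-side view of the behavior change and is not part of the commit. The endpoint URI and the fs.webhdfs.impl binding are assumptions (the binding mirrors what the HttpFS tests use). After this patch, HttpFSFileSystem.setWorkingDirectory() throws IllegalArgumentException for a path that fails DFSUtilClient.isValidName() instead of silently accepting it.

// Illustrative sketch only (not part of this commit).
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.http.client.HttpFSFileSystem;

public class HttpFsWorkingDirExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Bind the webhdfs scheme to HttpFSFileSystem (assumption: same binding as the HttpFS tests).
    conf.set("fs.webhdfs.impl", HttpFSFileSystem.class.getName());
    // Assumed HttpFS endpoint; replace with a real one.
    FileSystem fs = FileSystem.get(new URI("webhdfs://httpfs-host:14000"), conf);
    try {
      fs.setWorkingDirectory(new Path("/tmp"));      // valid path, accepted as before
      fs.setWorkingDirectory(new Path("/foo:bar"));  // ':' in a path component is now rejected
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());            // "Invalid DFS directory name /foo:bar"
    } finally {
      fs.close();
    }
  }
}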
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index 1722759..a9ef653 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.fs.permission.AclStatus;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
@@ -801,6 +802,11 @@ public class HttpFSFileSystem extends FileSystem
    */
   @Override
   public void setWorkingDirectory(Path newDir) {
+    String result = newDir.toUri().getPath();
+    if (!DFSUtilClient.isValidName(result)) {
+      throw new IllegalArgumentException(
+          "Invalid DFS directory name " + result);
+    }
     workingDir = newDir;
   }
 
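[Editor's note] The new check delegates to DFSUtilClient.isValidName(String), mirroring the validation DistributedFileSystem applies to working-directory paths. Below is a small, hypothetical probe of that validator: the results for "/tmp", "/bar" and "/foo:bar" follow from the test change later in this diff; the relative-path case is my reading of isValidName() and should be treated as an assumption.

// Illustrative probe only, not part of the commit.
import org.apache.hadoop.hdfs.DFSUtilClient;

public class IsValidNameProbe {
  public static void main(String[] args) {
    System.out.println(DFSUtilClient.isValidName("/tmp"));      // true: plain absolute path
    System.out.println(DFSUtilClient.isValidName("/bar"));      // true: plain absolute path
    System.out.println(DFSUtilClient.isValidName("/foo:bar"));  // false: ':' in a path component
    System.out.println(DFSUtilClient.isValidName("foo/bar"));   // false (assumption): not an absolute path
  }
}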
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
index 902861d..106e475 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.test.TestDir;
 import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.test.TestHdfs;
@@ -521,9 +522,18 @@ public abstract class BaseTestHttpFSWith extends HFSTestCase {
     fs = getHttpFSFileSystem();
     fs.setWorkingDirectory(new Path("/tmp"));
     workingDir = fs.getWorkingDirectory();
-    fs.close();
     assertEquals(workingDir.toUri().getPath(),
         new Path("/tmp").toUri().getPath());
+    final FileSystem httpFs = getHttpFSFileSystem();
+    LambdaTestUtils.intercept(IllegalArgumentException.class,
+        "Invalid DFS directory name /foo:bar",
+        () -> httpFs.setWorkingDirectory(new Path("/foo:bar")));
+    fs.setWorkingDirectory(new Path("/bar"));
+    workingDir = fs.getWorkingDirectory();
+    httpFs.close();
+    fs.close();
+    assertEquals(workingDir.toUri().getPath(),
+        new Path("/bar").toUri().getPath());
   }
 
   private void testTrashRoot() throws Exception {
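[Editor's note] The negative case in the test uses LambdaTestUtils.intercept, which evaluates a lambda and passes only if it throws the expected exception type with the expected message text. Below is a stand-alone sketch of that pattern; the test class, method, and helper names are hypothetical.

// Illustrative sketch only, not part of the commit.
import org.apache.hadoop.test.LambdaTestUtils;
import org.junit.Test;

public class InterceptPatternExample {

  // Stand-in for the production call under test (hypothetical helper).
  private void rejectPath(String path) {
    throw new IllegalArgumentException("Invalid DFS directory name " + path);
  }

  @Test
  public void interceptCatchesExpectedException() throws Exception {
    // intercept() runs the lambda and passes only if it throws the given
    // exception type with a message containing the given text; the test
    // fails if nothing is thrown or a different exception type is thrown.
    LambdaTestUtils.intercept(IllegalArgumentException.class,
        "Invalid DFS directory name /foo:bar",
        () -> rejectPath("/foo:bar"));
  }
}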
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]