HDFS-8542. WebHDFS getHomeDirectory behavior does not match specification. Contributed by Kanaka Kumar Avvaru.
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/211d8c14
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/211d8c14
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/211d8c14

Branch: refs/heads/YARN-2928
Commit: 211d8c141357d8d382fe168bff83208551ad29c9
Parents: fdafd69
Author: Jakob Homan <jgho...@gmail.com>
Authored: Mon Jun 22 16:30:45 2015 -0700
Committer: Zhijie Shen <zjs...@apache.org>
Committed: Mon Jun 29 10:28:21 2015 -0700

----------------------------------------------------------------------
 .../apache/hadoop/hdfs/web/JsonUtilClient.java  | 10 ++++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java      | 33 +++++++++--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |  3 +
 .../web/resources/NamenodeWebHdfsMethods.java   | 12 ++--
 .../org/apache/hadoop/hdfs/web/TestWebHDFS.java | 59 ++++++++++++++++++++
 .../hdfs/web/TestWebHdfsFileSystemContract.java |  2 +-
 6 files changed, 108 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index ca94840..e025e31 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -389,6 +389,16 @@ class JsonUtilClient {
     return aclStatusBuilder.build();
   }

+  static String getPath(final Map<?, ?> json)
+      throws IOException {
+    if (json == null) {
+      return null;
+    }
+
+    String path = (String) json.get("Path");
+    return path;
+  }
+
   static byte[] getXAttr(final Map<?, ?> json, final String name)
       throws IOException {
     if (json == null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index aebd25a..d902738 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -110,6 +110,7 @@ public class WebHdfsFileSystem extends FileSystem
   protected Text tokenServiceName;
   private RetryPolicy retryPolicy = null;
   private Path workingDir;
+  private Path cachedHomeDirectory;
   private InetSocketAddress nnAddrs[];
   private int currentNNAddrIndex;
   private boolean disallowFallbackToInsecureCluster;
@@ -193,7 +194,7 @@ public class WebHdfsFileSystem extends FileSystem
           failoverSleepMaxMillis);
     }

-    this.workingDir = getHomeDirectory();
+    this.workingDir = makeQualified(new Path(getHomeDirectoryString(ugi)));
     this.canRefreshDelegationToken = UserGroupInformation.isSecurityEnabled();
     this.disallowFallbackToInsecureCluster = !conf.getBoolean(
         CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY,
@@ -267,14 +268,35 @@ public class WebHdfsFileSystem extends FileSystem
     return NetUtils.getCanonicalUri(uri, getDefaultPort());
   }

-  /** @return the home directory. */
+  /** @return the home directory */
+  @Deprecated
   public static String getHomeDirectoryString(final UserGroupInformation ugi) {
     return "/user/" + ugi.getShortUserName();
   }

   @Override
   public Path getHomeDirectory() {
-    return makeQualified(new Path(getHomeDirectoryString(ugi)));
+    if (cachedHomeDirectory == null) {
+      final HttpOpParam.Op op = GetOpParam.Op.GETHOMEDIRECTORY;
+      try {
+        String pathFromDelegatedFS = new FsPathResponseRunner<String>(op, null,
+            new UserParam(ugi)) {
+          @Override
+          String decodeResponse(Map<?, ?> json) throws IOException {
+            return JsonUtilClient.getPath(json);
+          }
+        }.run();
+
+        cachedHomeDirectory = new Path(pathFromDelegatedFS).makeQualified(
+            this.getUri(), null);
+
+      } catch (IOException e) {
+        LOG.error("Unable to get HomeDirectory from original File System", e);
+        cachedHomeDirectory = new Path("/user/" + ugi.getShortUserName())
+            .makeQualified(this.getUri(), null);
+      }
+    }
+    return cachedHomeDirectory;
   }

   @Override
@@ -284,12 +306,13 @@ public class WebHdfsFileSystem extends FileSystem

   @Override
   public synchronized void setWorkingDirectory(final Path dir) {
-    String result = makeAbsolute(dir).toUri().getPath();
+    Path absolutePath = makeAbsolute(dir);
+    String result = absolutePath.toUri().getPath();
     if (!DFSUtilClient.isValidName(result)) {
       throw new IllegalArgumentException("Invalid DFS directory name " +
           result);
     }
-    workingDir = makeAbsolute(dir);
+    workingDir = absolutePath;
   }

   private Path makeAbsolute(Path f) {
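
With the change above, a WebHDFS client resolves the home directory by asking
the NameNode once and caching the answer, falling back to the old
/user/<short user name> convention if the call fails. A minimal sketch of the
caller-visible behavior (the endpoint below is a placeholder, not part of the
patch):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsHomeDirExample {
  public static void main(String[] args) throws Exception {
    // Placeholder NameNode HTTP endpoint; substitute a real cluster address.
    URI uri = URI.create("webhdfs://namenode.example.com:50070");
    FileSystem fs = FileSystem.get(uri, new Configuration());

    // The first call issues ?op=GETHOMEDIRECTORY to the NameNode and caches
    // the result; later calls return the cached Path. On an IOException the
    // client falls back to /user/<short user name>.
    Path home = fs.getHomeDirectory();
    System.out.println(home);

    fs.close();
  }
}
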
http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index b9d9943..4bd0e8b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -938,6 +938,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-4366. Block Replication Policy Implementation May Skip Higher-Priority
     Blocks for Lower-Priority Blocks (Derek Dagit via kihwal)

+    HDFS-8542. WebHDFS getHomeDirectory behavior does not match specification.
+    (Kanaka Kumar Avvaru via jghoman)
+
 Release 2.7.1 - UNRELEASED

   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index d33721c..6e880f0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -53,6 +53,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Options;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.permission.AclStatus;
@@ -826,6 +827,8 @@ public class NamenodeWebHdfsMethods {
       final TokenServiceParam tokenService
       ) throws IOException, URISyntaxException {
     final NameNode namenode = (NameNode)context.getAttribute("name.node");
+    final Configuration conf = (Configuration) context
+        .getAttribute(JspHelper.CURRENT_CONF);
     final NamenodeProtocols np = getRPCServer(namenode);

     switch(op.getValue()) {
@@ -892,11 +895,10 @@ public class NamenodeWebHdfsMethods {
       final String js = JsonUtil.toJsonString(token);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
-    case GETHOMEDIRECTORY:
-    {
-      final String js = JsonUtil.toJsonString(
-          org.apache.hadoop.fs.Path.class.getSimpleName(),
-          WebHdfsFileSystem.getHomeDirectoryString(ugi));
+    case GETHOMEDIRECTORY: {
+      final String js = JsonUtil.toJsonString("Path",
+          FileSystem.get(conf != null ? conf : new Configuration())
+              .getHomeDirectory().toUri().getPath());
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
     case GETACLSTATUS: {
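
On the server side, GETHOMEDIRECTORY now delegates to the FileSystem built
from the NameNode's configuration instead of hard-coding "/user/" plus the
short user name, so the JSON payload reflects that file system's notion of a
home directory. Roughly, the raw REST exchange looks like this (host, port,
and user name below are illustrative, not taken from the patch):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class GetHomeDirectoryCall {
  public static void main(String[] args) throws Exception {
    // Illustrative NameNode HTTP address and user name; substitute real ones.
    URL url = new URL("http://namenode.example.com:50070/webhdfs/v1/"
        + "?op=GETHOMEDIRECTORY&user.name=alice");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
        conn.getInputStream(), StandardCharsets.UTF_8))) {
      // Expected response body, e.g.: {"Path":"/user/alice"}
      System.out.println(in.readLine());
    } finally {
      conn.disconnect();
    }
  }
}
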
http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 0c963f1..0563f12 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -18,12 +18,15 @@
 package org.apache.hadoop.hdfs.web;

+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.fail;

 import java.io.IOException;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
@@ -557,4 +560,60 @@ public class TestWebHDFS {
       }
     }
   }
+
+  @Test(timeout = 30000)
+  public void testGetHomeDirectory() throws Exception {
+
+    MiniDFSCluster cluster = null;
+    try {
+      Configuration conf = new Configuration();
+      cluster = new MiniDFSCluster.Builder(conf).build();
+      cluster.waitActive();
+      DistributedFileSystem hdfs = cluster.getFileSystem();
+
+      final URI uri = new URI(WebHdfsConstants.WEBHDFS_SCHEME + "://"
+          + cluster.getHttpUri(0).replace("http://", ""));
+      final Configuration confTemp = new Configuration();
+
+      {
+        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri,
+            confTemp);
+
+        assertEquals(hdfs.getHomeDirectory().toUri().getPath(), webhdfs
+            .getHomeDirectory().toUri().getPath());
+
+        webhdfs.close();
+      }
+
+      {
+        WebHdfsFileSystem webhdfs = createWebHDFSAsTestUser(confTemp, uri,
+            "XXX");
+
+        assertNotEquals(hdfs.getHomeDirectory().toUri().getPath(), webhdfs
+            .getHomeDirectory().toUri().getPath());
+
+        webhdfs.close();
+      }
+
+    } finally {
+      if (cluster != null)
+        cluster.shutdown();
+    }
+  }
+
+  private WebHdfsFileSystem createWebHDFSAsTestUser(final Configuration conf,
+      final URI uri, final String userName) throws Exception {
+
+    final UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
+        userName, new String[] { "supergroup" });
+
+    return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
+      @Override
+      public WebHdfsFileSystem run() throws IOException {
+        WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(uri,
+            conf);
+        return webhdfs;
+      }
+    });
+  }
 }
http://git-wip-us.apache.org/repos/asf/hadoop/blob/211d8c14/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
index bc10bca..5d20014 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
@@ -399,7 +399,7 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
       final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
       final Map<?, ?> m = WebHdfsTestUtil.connectAndGetJson(
           conn, HttpServletResponse.SC_OK);
-      assertEquals(WebHdfsFileSystem.getHomeDirectoryString(ugi),
+      assertEquals(webhdfs.getHomeDirectory().toUri().getPath(),
           m.get(Path.class.getSimpleName()));
       conn.disconnect();
     }
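
Since getHomeDirectoryString(ugi) is now deprecated, downstream callers that
relied on the fixed client-side convention should switch to the instance
method, as the two can disagree once the NameNode delegates to its configured
file system. A small migration sketch (the fs and ugi arguments are assumed,
for illustration only):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.security.UserGroupInformation;

public class HomeDirMigrationSketch {
  static Path homeOf(WebHdfsFileSystem fs, UserGroupInformation ugi) {
    // Deprecated by this patch: the fixed client-side convention.
    String conventional = WebHdfsFileSystem.getHomeDirectoryString(ugi);
    System.out.println("convention says: " + conventional);

    // Preferred after this patch: ask the file system itself; the NameNode
    // delegates to its configured FileSystem, so the answer may differ.
    return fs.getHomeDirectory();
  }
}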