hadoop git commit: HDFS-9525. hadoop utilities need to support provided delegation tokens. Contributed by HeeSoo Kim
Repository: hadoop Updated Branches: refs/heads/branch-2 96111caca -> 88e1427a0 HDFS-9525. hadoop utilities need to support provided delegation tokens. Contributed by HeeSoo Kim Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/88e1427a Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/88e1427a Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/88e1427a Branch: refs/heads/branch-2 Commit: 88e1427a036fb0d7b7dd9cd333fedcf1d63f3ad7 Parents: 96111ca Author: Ravi Prakash Authored: Sat Apr 23 20:50:56 2016 -0700 Committer: Ravi Prakash Committed: Sat Apr 23 20:50:56 2016 -0700 -- .../fs/CommonConfigurationKeysPublic.java | 3 ++ .../hadoop/security/UserGroupInformation.java | 22 + .../src/main/resources/core-default.xml | 6 +++ .../security/TestUserGroupInformation.java | 48 +++- .../hadoop/hdfs/web/WebHdfsFileSystem.java | 14 +++--- .../hdfs/web/resources/DelegationParam.java | 5 +- .../apache/hadoop/hdfs/web/TestWebHdfsUrl.java | 5 +- 7 files changed, 89 insertions(+), 14 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e1427a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java index 0baca07..ca17f8d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java @@ -316,6 +316,9 @@ public class CommonConfigurationKeysPublic { /** See core-default.xml */ public static final String HADOOP_SECURITY_DNS_NAMESERVER_KEY = "hadoop.security.dns.nameserver"; + /** See core-default.xml */ + public static final String HADOOP_TOKEN_FILES 
= + "hadoop.token.files"; @Deprecated /** Only used by HttpServer. */ http://git-wip-us.apache.org/repos/asf/hadoop/blob/88e1427a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java -- diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java index 90d396f..2ea80dd 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/UserGroupInformation.java @@ -18,6 +18,7 @@ package org.apache.hadoop.security; import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS; +import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_TOKEN_FILES; import static org.apache.hadoop.util.PlatformName.IBM_JAVA; import java.io.File; @@ -66,6 +67,7 @@ import org.apache.hadoop.security.authentication.util.KerberosUtil; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.util.Shell; +import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Time; import com.google.common.annotations.VisibleForTesting; @@ -811,6 +813,26 @@ public class UserGroupInformation { } loginUser = proxyUser == null ? 
realUser : createProxyUser(proxyUser, realUser); + String tokenFileLocation = System.getProperty(HADOOP_TOKEN_FILES); + if (tokenFileLocation == null) { +tokenFileLocation = conf.get(HADOOP_TOKEN_FILES); + } + if (tokenFileLocation != null) { +for (String tokenFileName: + StringUtils.getTrimmedStrings(tokenFileLocation)) { + if (tokenFileName.length() > 0) { +File tokenFile = new File(tokenFileName); +if (tokenFile.exists() && tokenFile.isFile()) { + Credentials cred = Credentials.readTokenStorageFile( + tokenFile, conf); + loginUser.addCredentials(cred); +} else { + LOG.info("tokenFile("+tokenFileName+") does not exist"); +} + } +} + } + String fileLocation = System.getenv(HADOOP_TOKEN_FILE_LOCATION); if (fileLocation != null) { // Load the token storage file and put all of the tokens into the
hadoop git commit: HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims)
Repository: hadoop Updated Branches: refs/heads/branch-2.8 358b54d06 -> c4d8090b7 HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims) (cherry picked from commit 6fcde2e38da04cae3aad6b13cf442af211f71506) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c4d8090b Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c4d8090b Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c4d8090b Branch: refs/heads/branch-2.8 Commit: c4d8090b7291d942e80685975de337060dc7d153 Parents: 358b54d Author: Masatake Iwasaki Authored: Sat Apr 23 23:37:56 2016 +0900 Committer: Masatake Iwasaki Committed: Sat Apr 23 23:55:27 2016 +0900 -- .../hadoop/hdfs/web/WebHdfsFileSystem.java | 6 +++-- .../hadoop/hdfs/web/TestWebHdfsTimeouts.java| 25 +--- 2 files changed, 20 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d8090b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 6a90be5..03b372e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -737,8 +737,10 @@ public class WebHdfsFileSystem extends FileSystem node = url.getAuthority(); } try { - ioe = ioe.getClass().getConstructor(String.class) -.newInstance(node + ": " + ioe.getMessage()); +IOException newIoe = ioe.getClass().getConstructor(String.class) +.newInstance(node + ": " + ioe.getMessage()); +newIoe.setStackTrace(ioe.getStackTrace()); +ioe = newIoe; } catch (NoSuchMethodException | SecurityException | 
InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/c4d8090b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java index 664e32d..67c39e1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.web; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.BufferedReader; @@ -46,6 +45,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.runner.RunWith; @@ -150,7 +150,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out",e); } } @@ -163,7 +164,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": Read timed out", e); } } @@ -178,7 +180,8 @@ public class TestWebHdfsTimeouts { 
fs.getDelegationToken("renewer"); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out", e); } } @@ -192,7 +195,8 @@ public class
hadoop git commit: HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims)
Repository: hadoop Updated Branches: refs/heads/branch-2 594943601 -> 96111caca HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims) (cherry picked from commit 6fcde2e38da04cae3aad6b13cf442af211f71506) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/96111cac Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/96111cac Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/96111cac Branch: refs/heads/branch-2 Commit: 96111caca99dc5d73fb22804957796eae9ffed8c Parents: 5949436 Author: Masatake Iwasaki Authored: Sat Apr 23 23:37:56 2016 +0900 Committer: Masatake Iwasaki Committed: Sat Apr 23 23:50:01 2016 +0900 -- .../hadoop/hdfs/web/WebHdfsFileSystem.java | 6 +++-- .../hadoop/hdfs/web/TestWebHdfsTimeouts.java| 25 +--- 2 files changed, 20 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/96111cac/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 6a90be5..03b372e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -737,8 +737,10 @@ public class WebHdfsFileSystem extends FileSystem node = url.getAuthority(); } try { - ioe = ioe.getClass().getConstructor(String.class) -.newInstance(node + ": " + ioe.getMessage()); +IOException newIoe = ioe.getClass().getConstructor(String.class) +.newInstance(node + ": " + ioe.getMessage()); +newIoe.setStackTrace(ioe.getStackTrace()); +ioe = newIoe; } catch (NoSuchMethodException | SecurityException | 
InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException e) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/96111cac/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java index 664e32d..67c39e1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.web; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.BufferedReader; @@ -46,6 +45,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.runner.RunWith; @@ -150,7 +150,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out",e); } } @@ -163,7 +164,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": Read timed out", e); } } @@ -178,7 +180,8 @@ public class TestWebHdfsTimeouts { 
fs.getDelegationToken("renewer"); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out", e); } } @@ -192,7 +195,8 @@ public class
hadoop git commit: HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims)
Repository: hadoop Updated Branches: refs/heads/trunk b2a654c5e -> 6fcde2e38 HDFS-9905. WebHdfsFileSystem#runWithRetry should display original stack trace on error. (Wei-Chiu Chuang via iwasakims) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6fcde2e3 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6fcde2e3 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6fcde2e3 Branch: refs/heads/trunk Commit: 6fcde2e38da04cae3aad6b13cf442af211f71506 Parents: b2a654c Author: Masatake Iwasaki Authored: Sat Apr 23 23:37:56 2016 +0900 Committer: Masatake Iwasaki Committed: Sat Apr 23 23:37:56 2016 +0900 -- .../hadoop/hdfs/web/WebHdfsFileSystem.java | 6 +++-- .../hadoop/hdfs/web/TestWebHdfsTimeouts.java| 25 +--- 2 files changed, 20 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fcde2e3/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 3527c27..ab4e0d0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -730,8 +730,10 @@ public class WebHdfsFileSystem extends FileSystem node = url.getAuthority(); } try { - ioe = ioe.getClass().getConstructor(String.class) -.newInstance(node + ": " + ioe.getMessage()); +IOException newIoe = ioe.getClass().getConstructor(String.class) +.newInstance(node + ": " + ioe.getMessage()); +newIoe.setStackTrace(ioe.getStackTrace()); +ioe = newIoe; } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | 
InvocationTargetException e) { http://git-wip-us.apache.org/repos/asf/hadoop/blob/6fcde2e3/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java -- diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java index 664e32d..67c39e1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java @@ -18,7 +18,6 @@ package org.apache.hadoop.hdfs.web; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.io.BufferedReader; @@ -46,6 +45,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.runner.RunWith; @@ -150,7 +150,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out",e); } } @@ -163,7 +164,8 @@ public class TestWebHdfsTimeouts { fs.listFiles(new Path("/"), false); fail("expected timeout"); } catch (SocketTimeoutException e) { - assertEquals(fs.getUri().getAuthority() + ": Read timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": Read timed out", e); } } @@ -178,7 +180,8 @@ public class TestWebHdfsTimeouts { fs.getDelegationToken("renewer"); fail("expected timeout"); } catch (SocketTimeoutException e) { - 
assertEquals(fs.getUri().getAuthority() + ": connect timed out", e.getMessage()); + GenericTestUtils.assertExceptionContains(fs.getUri().getAuthority() + + ": connect timed out", e); } } @@ -192,7 +195,8 @@ public class TestWebHdfsTimeouts { fs.getDelegationToken("renewer"); fail("expected
hadoop git commit: YARN-4335. Allow ResourceRequests to specify ExecutionType of a request ask (kkaranasos via asuresh)
Repository: hadoop Updated Branches: refs/heads/trunk 63e5412f1 -> b2a654c5e YARN-4335. Allow ResourceRequests to specify ExecutionType of a request ask (kkaranasos via asuresh) (cherry picked from commit 8ffabfdf4fec104cdaa68133385cfe9cf88b95a8) Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b2a654c5 Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b2a654c5 Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b2a654c5 Branch: refs/heads/trunk Commit: b2a654c5ee6524f81c971ea0b70e58ea0a455f1d Parents: 63e5412 Author: Arun Suresh Authored: Thu Jan 7 15:18:59 2016 -0800 Committer: Arun Suresh Committed: Fri Apr 22 23:20:54 2016 -0700 -- .../yarn/api/records/ResourceRequest.java | 41 ++- .../src/main/proto/yarn_protos.proto| 1 + .../hadoop/yarn/client/api/AMRMClient.java | 43 ++-- .../records/impl/pb/ResourceRequestPBImpl.java | 21 ++ 4 files changed, 102 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2a654c5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceRequest.java -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceRequest.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceRequest.java index 790120a..8c1fd8d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceRequest.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ResourceRequest.java @@ -79,6 +79,15 @@ public abstract class ResourceRequest implements Comparable { public static ResourceRequest newInstance(Priority priority, String hostName, Resource capability, int numContainers, boolean relaxLocality, String labelExpression) { +return newInstance(priority, hostName, capability, 
numContainers, +relaxLocality, labelExpression, ExecutionType.GUARANTEED); + } + + @Public + @Stable + public static ResourceRequest newInstance(Priority priority, String hostName, + Resource capability, int numContainers, boolean relaxLocality, String + labelExpression, ExecutionType execType) { ResourceRequest request = Records.newRecord(ResourceRequest.class); request.setPriority(priority); request.setResourceName(hostName); @@ -86,6 +95,7 @@ public abstract class ResourceRequest implements Comparable { request.setNumContainers(numContainers); request.setRelaxLocality(relaxLocality); request.setNodeLabelExpression(labelExpression); +request.setExecutionType(execType); return request; } @@ -221,7 +231,28 @@ public abstract class ResourceRequest implements Comparable { @Public @Stable public abstract boolean getRelaxLocality(); - + + /** + * Set the ExecutionType of the requested container. + * + * @param execType + * ExecutionType of the requested container + */ + @Public + @Stable + public abstract void setExecutionType(ExecutionType execType); + + /** + * Get whether locality relaxation is enabled with this + * ResourceRequest. Defaults to true. + * + * @return whether locality relaxation is enabled with this + * ResourceRequest. + */ + @Public + @Stable + public abstract ExecutionType getExecutionType(); + /** * For a request at a network hierarchy level, set whether locality can be relaxed * to that level and beyond. 
@@ -322,6 +353,14 @@ public abstract class ResourceRequest implements Comparable { return false; } else if (!priority.equals(other.getPriority())) return false; +ExecutionType executionType = getExecutionType(); +if (executionType == null) { + if (other.getExecutionType() != null) { +return false; + } +} else if (executionType != other.getExecutionType()) { + return false; +} if (getNodeLabelExpression() == null) { if (other.getNodeLabelExpression() != null) { return false; http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2a654c5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto -- diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto index 2fe4eda..635f2f0 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto