Repository: ranger
Updated Branches:
  refs/heads/master ba99369a8 -> 79e91fa5f
RANGER-1844 Ranger admin support hdfs HA configuration when creating hdfs service.

Signed-off-by: peng.jianhua <peng.jian...@zte.com.cn>


Project: http://git-wip-us.apache.org/repos/asf/ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/79e91fa5
Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/79e91fa5
Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/79e91fa5

Branch: refs/heads/master
Commit: 79e91fa5f5175b3ddeab8d901f7e98697c9d639f
Parents: ba99369
Author: zhangqiang2 <zhangqia...@zte.com.cn>
Authored: Fri Oct 20 14:07:26 2017 +0800
Committer: peng.jianhua <peng.jian...@zte.com.cn>
Committed: Tue Oct 24 16:20:02 2017 +0800

----------------------------------------------------------------------
 .../ranger/services/hdfs/client/HdfsClient.java    | 48 +++++++++++++++-----
 .../scripts/models/BackboneFormDataType.js         |  3 ++
 .../scripts/modules/globalize/message/en.js        |  3 +-
 3 files changed, 42 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ranger/blob/79e91fa5/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
index c252213..39fb9e8 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
@@ -270,18 +270,44 @@ public class HdfsClient extends BaseClient {
 				throw new IllegalArgumentException("Value for password not specified");
 			}
 		}
+		// hadoop.security.authentication
+		String authentication = configs.get("hadoop.security.authentication");
+		if ((authentication == null || authentication.isEmpty())) {
+			throw new IllegalArgumentException("Value for hadoop.security.authentication not specified");
+		}
 
-		// hadoop.security.authentication
-		String authentication = configs.get("hadoop.security.authentication");
-		if ((authentication == null || authentication.isEmpty())) {
-			throw new IllegalArgumentException("Value for hadoop.security.authentication not specified");
-		}
-
-		String fsDefaultName = configs.get("fs.default.name");
-		fsDefaultName = (fsDefaultName == null) ? "" : fsDefaultName.trim();
-		if (fsDefaultName.isEmpty()) {
-			throw new IllegalArgumentException("Value for fs.default.name not specified");
-		}
+		String fsDefaultName = configs.get("fs.default.name");
+		fsDefaultName = (fsDefaultName == null) ? "" : fsDefaultName.trim();
+		if (fsDefaultName.isEmpty()) {
+			throw new IllegalArgumentException("Value for fs.default.name not specified");
+		} else {
+			String[] fsDefaultNameElements = fsDefaultName.split(",");
+			for (String fsDefaultNameElement : fsDefaultNameElements) {
+				if (fsDefaultNameElement.isEmpty()) {
+					throw new IllegalArgumentException(
+							"Value for " + "fs.default.name element" + fsDefaultNameElement + " not specified");
+				}
+			}
+			if (fsDefaultNameElements != null && fsDefaultNameElements.length >= 2) {
+				String cluster = "";
+				String clusters = "";
+				configs.put("dfs.nameservices", "hdfscluster");
+				configs.put("fs.default.name", "hdfs://" + configs.get("dfs.nameservices"));
+				configs.put("dfs.client.failover.proxy.provider." + configs.get("dfs.nameservices"),
+						"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
+				for (int i = 0; i < fsDefaultNameElements.length; i++) {
+					cluster = "namenode" + (i + 1);
+					configs.put("dfs.namenode.rpc-address." + configs.get("dfs.nameservices") + "." + cluster,
+							fsDefaultNameElements[i]);
+					if (i == (fsDefaultNameElements.length - 1)) {
+						clusters += cluster;
+					} else {
+						clusters += cluster + ",";
+					}
+				}
+				configs.put("dfs.ha.namenodes." + configs.get("dfs.nameservices"), clusters);
+			}
+		}
 
 		String dfsNameservices = configs.get("dfs.nameservices");
 		dfsNameservices = (dfsNameservices == null) ? "" : dfsNameservices.trim();

http://git-wip-us.apache.org/repos/asf/ranger/blob/79e91fa5/security-admin/src/main/webapp/scripts/models/BackboneFormDataType.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/models/BackboneFormDataType.js b/security-admin/src/main/webapp/scripts/models/BackboneFormDataType.js
index 3f8697e..afcc290 100644
--- a/security-admin/src/main/webapp/scripts/models/BackboneFormDataType.js
+++ b/security-admin/src/main/webapp/scripts/models/BackboneFormDataType.js
@@ -85,6 +85,9 @@ define(function(require) {
 				}else if(v.name == 'jdbc.url'){
 					formObj.type = 'TextFiledWithIcon';
 					formObj.errorMsg = localization.tt("hintMsg.hiveJDBCUrl");
+				}else if(v.name == 'fs.default.name'){
+					formObj.type = 'TextFiledWithIcon';
+					formObj.errorMsg = localization.tt("hintMsg.hdfsNameNodeUrl");
 				}else{
 					formObj.type = 'Text';
 				}

http://git-wip-us.apache.org/repos/asf/ranger/blob/79e91fa5/security-admin/src/main/webapp/scripts/modules/globalize/message/en.js
----------------------------------------------------------------------
diff --git a/security-admin/src/main/webapp/scripts/modules/globalize/message/en.js b/security-admin/src/main/webapp/scripts/modules/globalize/message/en.js
index 811db0c..5900c55 100644
--- a/security-admin/src/main/webapp/scripts/modules/globalize/message/en.js
+++ b/security-admin/src/main/webapp/scripts/modules/globalize/message/en.js
@@ -446,7 +446,8 @@ define(function(require) {
 		},
 		hintMsg : {
 			yarnRestUrl :"1.For one url, eg.<br>'http or https://<ipaddr>:8088'<br>2.For multiple urls (use , or ; delimiter), eg.<br>'http://<ipaddr1>:8088,http://<ipaddr2>:8088'",
-			hiveJDBCUrl :"1.For Remote Mode, eg.<br>jdbc:hive2://<host>:<port><br>2.For Embedded Mode (no host or port), eg.<br>jdbc:hive2:///;initFile=<file><br>3.For HTTP Mode, eg.<br>jdbc:hive2://<host>:<port>/;<br>transportMode=http;httpPath=<httpPath><br>4.For SSL Mode, eg.<br>jdbc:hive2://<host>:<port>/;ssl=true;<br>sslTrustStore=tStore;trustStorePassword=pw<br>5.For ZooKeeper Mode, eg.<br>jdbc:hive2://<host>/;serviceDiscoveryMode=<br>zooKeeper;zooKeeperNamespace=hiveserver2<br>6.For Kerberos Mode, eg.<br>jdbc:hive2://<host>:<port>/;<br>principal=hive/dom...@example.com<br>"
+			hiveJDBCUrl :"1.For Remote Mode, eg.<br>jdbc:hive2://<host>:<port><br>2.For Embedded Mode (no host or port), eg.<br>jdbc:hive2:///;initFile=<file><br>3.For HTTP Mode, eg.<br>jdbc:hive2://<host>:<port>/;<br>transportMode=http;httpPath=<httpPath><br>4.For SSL Mode, eg.<br>jdbc:hive2://<host>:<port>/;ssl=true;<br>sslTrustStore=tStore;trustStorePassword=pw<br>5.For ZooKeeper Mode, eg.<br>jdbc:hive2://<host>/;serviceDiscoveryMode=<br>zooKeeper;zooKeeperNamespace=hiveserver2<br>6.For Kerberos Mode, eg.<br>jdbc:hive2://<host>:<port>/;<br>principal=hive/dom...@example.com<br>",
+			hdfsNameNodeUrl :"1.For one Namenode Url, eg.<br>hdfs://<host>:<port><br>2.For HA Namenode Urls(use , delimiter), eg.<br>hdfs://<host>:<port>,hdfs://<host2>:<port2><br>"
 		}
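----------------------------------------------------------------------
For reference, a minimal standalone sketch of the property mapping the patched HdfsClient performs when fs.default.name contains two or more comma-separated NameNode URLs. The property names (dfs.nameservices, dfs.ha.namenodes.*, dfs.namenode.rpc-address.*, dfs.client.failover.proxy.provider.*) and the fixed nameservice name "hdfscluster" are the ones written by the patch; the class name HdfsHaConfigSketch, the example hosts nn1/nn2.example.com, and the main() harness are hypothetical and for illustration only.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch (not part of the commit): mirrors the HA property
// derivation added to HdfsClient when fs.default.name lists multiple NameNodes.
public class HdfsHaConfigSketch {
    public static void main(String[] args) {
        Map<String, String> configs = new HashMap<>();
        // Hypothetical admin input: comma-separated NameNode URLs
        configs.put("fs.default.name", "hdfs://nn1.example.com:8020,hdfs://nn2.example.com:8020");

        String[] elements = configs.get("fs.default.name").trim().split(",");
        if (elements.length >= 2) {
            // A single logical nameservice is synthesized, using the patch's fixed name
            configs.put("dfs.nameservices", "hdfscluster");
            configs.put("fs.default.name", "hdfs://" + configs.get("dfs.nameservices"));
            configs.put("dfs.client.failover.proxy.provider." + configs.get("dfs.nameservices"),
                    "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
            StringBuilder namenodes = new StringBuilder();
            for (int i = 0; i < elements.length; i++) {
                String nn = "namenode" + (i + 1);
                // Each input URL becomes the rpc-address of one logical NameNode entry
                configs.put("dfs.namenode.rpc-address." + configs.get("dfs.nameservices") + "." + nn, elements[i]);
                namenodes.append(i == 0 ? "" : ",").append(nn);
            }
            configs.put("dfs.ha.namenodes." + configs.get("dfs.nameservices"), namenodes.toString());
        }

        // Prints, in some order:
        //   fs.default.name = hdfs://hdfscluster
        //   dfs.nameservices = hdfscluster
        //   dfs.ha.namenodes.hdfscluster = namenode1,namenode2
        //   dfs.namenode.rpc-address.hdfscluster.namenode1 = hdfs://nn1.example.com:8020
        //   dfs.namenode.rpc-address.hdfscluster.namenode2 = hdfs://nn2.example.com:8020
        //   dfs.client.failover.proxy.provider.hdfscluster = org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider
        configs.forEach((k, v) -> System.out.println(k + " = " + v));
    }
}

Note that, as in the patch itself, each rpc-address value carries over the full input URL (including the hdfs:// scheme) rather than a bare host:port.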