http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java ---------------------------------------------------------------------- diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java new file mode 100644 index 0000000..e9b6c33 --- /dev/null +++ b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java @@ -0,0 +1,387 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.ranger.knox.client; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.ranger.hadoop.client.config.BaseClient; +import org.apache.ranger.hadoop.client.exceptions.HadoopException; +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; + +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; +import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter; + +public class KnoxClient { + + private static final String EXPECTED_MIME_TYPE = "application/json"; + private static final Log LOG = LogFactory.getLog(KnoxClient.class); + + private String knoxUrl; + private String userName; + private String password; + + /* + Sample curl calls to Knox to discover topologies + curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies + curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin + */ + + public KnoxClient(String knoxUrl, String userName, String password) { + LOG.debug("Constructed KnoxClient with knoxUrl: " + knoxUrl + + ", userName: " + userName); + this.knoxUrl = knoxUrl; + this.userName = userName; + this.password = password; + } + + public List<String> getTopologyList(String topologyNameMatching) { + + // sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies + LOG.debug("Getting Knox topology list for topologyNameMatching : " + + topologyNameMatching); + List<String> topologyList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
Check xa_portal.log for more info."; + if ( topologyNameMatching == null || topologyNameMatching.trim().isEmpty()) { + topologyNameMatching = ""; + } + try { + + Client client = null; + ClientResponse response = null; + + try { + client = Client.create();; + + client.addFilter(new HTTPBasicAuthFilter(userName, password)); + WebResource webResource = client.resource(knoxUrl); + response = webResource.accept(EXPECTED_MIME_TYPE) + .get(ClientResponse.class); + LOG.debug("Knox topology list response: " + response); + if (response != null) { + + if (response.getStatus() == 200) { + String jsonString = response.getEntity(String.class); + LOG.debug("Knox topology list response JSON string: "+ jsonString); + + ObjectMapper objectMapper = new ObjectMapper(); + + JsonNode rootNode = objectMapper.readTree(jsonString); + JsonNode topologyNode = rootNode.findValue("topology"); + if (topologyNode == null) { + return topologyList; + } + Iterator<JsonNode> elements = topologyNode.getElements(); + while (elements.hasNext()) { + JsonNode element = elements.next(); + String topologyName = element.get("name").getValueAsText(); + LOG.debug("Found Knox topologyName: " + topologyName); + if (topologyName.startsWith(topologyNameMatching)) { + topologyList.add(topologyName); + } + } + } else { + LOG.error("Got invalid REST response from: "+ knoxUrl + ", responsStatus: " + response.getStatus()); + } + + } else { + String msgDesc = "Unable to get a valid response for " + + "isFileChanged() call for KnoxUrl : [" + knoxUrl + + "] - got null response."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, + msgDesc + errMsg, null, null); + throw hdpException; + } + + } finally { + if (response != null) { + response.close(); + } + if (client != null) { + client.destroy(); + } + } + } catch (HadoopException he) { + throw he; + } catch (Throwable t) { + String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + "."; + HadoopException hdpException = new HadoopException(msgDesc, t); + LOG.error(msgDesc, t); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(t), msgDesc + errMsg, null, null); + throw hdpException; + } finally { + } + return topologyList; + } + + + public List<String> getServiceList(String topologyName, String serviceNameMatching) { + + // sample URI: .../admin/api/v1/topologies/<topologyName> + + List<String> serviceList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
Check xa_portal.log for more info."; + if ( serviceNameMatching == null || serviceNameMatching.trim().isEmpty()) { + serviceNameMatching = ""; + } + try { + + Client client = null; + ClientResponse response = null; + + try { + client = Client.create();; + + client.addFilter(new HTTPBasicAuthFilter(userName, password)); + + WebResource webResource = client.resource(knoxUrl + "/" + topologyName); + + response = webResource.accept(EXPECTED_MIME_TYPE) + .get(ClientResponse.class); + LOG.debug("Knox service lookup response: " + response); + if (response != null) { + + if (response.getStatus() == 200) { + String jsonString = response.getEntity(String.class); + LOG.debug("Knox service look up response JSON string: " + jsonString); + + ObjectMapper objectMapper = new ObjectMapper(); + + JsonNode rootNode = objectMapper.readTree(jsonString); + JsonNode topologyNode = rootNode.findValue("topology"); + JsonNode servicesNode = topologyNode.get("services"); + Iterator<JsonNode> services = servicesNode.getElements(); + while (services.hasNext()) { + JsonNode service = services.next(); + String serviceName = service.get("role").getValueAsText(); + LOG.debug("Knox serviceName: " + serviceName); + if (serviceName.startsWith(serviceNameMatching)) { + serviceList.add(serviceName); + } + } + } else { + LOG.error("Got invalid REST response from: "+ knoxUrl + ", responsStatus: " + response.getStatus()); + } + + } else { + String msgDesc = "Unable to get a valid response for " + + "isFileChanged() call for KnoxUrl : [" + knoxUrl + + "] - got null response."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, + msgDesc + errMsg, null, null); + throw hdpException; + } + + } finally { + if (response != null) { + response.close(); + } + if (client != null) { + client.destroy(); + } + } + } catch (HadoopException he) { + throw he; + } catch (Throwable t) { + String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + "."; + HadoopException hdpException = new HadoopException(msgDesc, t); + LOG.error(msgDesc, t); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(t), msgDesc + errMsg, null, null); + throw hdpException; + + } finally { + } + return serviceList; + } + + public static void main(String[] args) { + + KnoxClient knoxClient = null; + + if (args.length != 3) { + System.err.println("USAGE: java " + KnoxClient.class.getName() + + " knoxUrl userName password [sslConfigFileName]"); + System.exit(1); + } + + try { + knoxClient = new KnoxClient(args[0], args[1], args[2]); + List<String> topologyList = knoxClient.getTopologyList(""); + if ((topologyList == null) || topologyList.isEmpty()) { + System.out.println("No knox topologies found"); + } else { + for (String topology : topologyList) { + System.out.println("Found Topology: " + topology); + List<String> serviceList = knoxClient.getServiceList(topology, ""); + if ((serviceList == null) || serviceList.isEmpty()) { + System.out.println("No services found for knox topology: " + topology); + } else { + for (String service : serviceList) { + System.out.println(" Found service for topology: " + service +", " + topology); + } + } + } + } + } finally { + } + } + + public static HashMap<String, Object> testConnection(String dataSource, + HashMap<String, String> connectionProperties) { + + List<String> strList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use 
autocomplete for " + + "resource names. Check xa_portal.log for more info."; + boolean connectivityStatus = false; + HashMap<String, Object> responseData = new HashMap<String, Object>(); + + KnoxClient knoxClient = getKnoxClient(dataSource, connectionProperties); + strList = getKnoxResources(knoxClient, "", null); + + if (strList != null && (strList.size() != 0)) { + connectivityStatus = true; + } + + if (connectivityStatus) { + String successMsg = "TestConnection Successful"; + BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg, + null, null, responseData); + } else { + String failureMsg = "Unable to retrieve any topologies/services using given parameters."; + BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg, + null, null, responseData); + } + + return responseData; + } + + public static KnoxClient getKnoxClient(String dataSourceName, + Map<String, String> configMap) { + KnoxClient knoxClient = null; + LOG.debug("Getting knoxClient for datasource: " + dataSourceName + + "configMap: " + configMap); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + if (configMap == null || configMap.isEmpty()) { + String msgDesc = "Could not connect as Connection ConfigMap is empty."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null, + null); + throw hdpException; + } else { + String knoxUrl = configMap.get("knox.url"); + String knoxAdminUser = configMap.get("username"); + String knoxAdminPassword = configMap.get("password"); + knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, + knoxAdminPassword); + } + return knoxClient; + } + + public static List<String> getKnoxResources(final KnoxClient knoxClient, + String topologyName, String serviceName) { + + List<String> resultList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + + try { + if (knoxClient == null) { + // LOG.error("Unable to get knox resources: knoxClient is null"); + // return new ArrayList<String>(); + String msgDesc = "Unable to get knox resources: knoxClient is null."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, + null, null); + throw hdpException; + } + + final Callable<List<String>> callableObj; + if (serviceName != null) { + final String finalServiceNameMatching = serviceName.trim(); + final String finalTopologyName = topologyName; + callableObj = new Callable<List<String>>() { + @Override + public List<String> call() { + return knoxClient.getServiceList(finalTopologyName, + finalServiceNameMatching); + } + }; + + } else { + final String finalTopologyNameMatching = (topologyName == null) ? 
"" + : topologyName.trim(); + callableObj = new Callable<List<String>>() { + @Override + public List<String> call() { + return knoxClient + .getTopologyList(finalTopologyNameMatching); + } + }; + } + resultList = timedTask(callableObj, 5, TimeUnit.SECONDS); + + } catch (HadoopException he) { + throw he; + } catch (Exception e) { + String msgDesc = "Unable to get knox resources."; + LOG.error(msgDesc, e); + HadoopException hdpException = new HadoopException(msgDesc); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(e), msgDesc + errMsg, null, null); + throw hdpException; + } + + return resultList; + } + + public static <T> T timedTask(Callable<T> callableObj, long timeout, + TimeUnit timeUnit) throws Exception { + return callableObj.call(); + } + +}
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
new file mode 100644
index 0000000..dd6acbc
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ranger.knox.client;
+
+public class KnoxClientTest {
+
+	/*
+	Sample curl calls to knox REST API to discover topologies
+	curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
+	curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
+	*/
+
+	public static void main(String[] args) {
+		System.out.println(System.getProperty("java.class.path"));
+		System.setProperty("javax.net.ssl.trustStore", "/tmp/cacertswithknox");
+		String[] testArgs = {
+				"https://localhost:8443/gateway/admin/api/v1/topologies",
+				"admin",
+				"admin-password"
+				};
+		KnoxClient.main(testArgs);
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
new file mode 100644
index 0000000..763ce75
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ranger.knox.client;
+
+public class RangerConstants {
+
+	// xasecure 2-way ssl configuration
+
+	public static final String RANGER_KNOX_CLIENT_KEY_FILE = "xasecure.knoxclient.ssl.keystore";
+	public static final String RANGER_KNOX_CLIENT_KEY_FILE_PASSWORD = "xasecure.knoxclient.ssl.keystore.password";
+	public static final String RANGER_KNOX_CLIENT_KEY_FILE_TYPE = "xasecure.knoxclient.ssl.keystore.type";
+
+	public static final String RANGER_KNOX_CLIENT_KEY_FILE_TYPE_DEFAULT = "jks";
+
+	public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE = "xasecure.knoxclient.ssl.truststore";
+	public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_PASSWORD = "xasecure.knoxclient.ssl.truststore.password";
+	public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE = "xasecure.knoxclient.ssl.truststore.type";
+
+	public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE_DEFAULT = "jks";
+
+	public static final String RANGER_SSL_KEYMANAGER_ALGO_TYPE = "SunX509";
+	public static final String RANGER_SSL_TRUSTMANAGER_ALGO_TYPE = "SunX509";
+	public static final String RANGER_SSL_CONTEXT_ALGO_TYPE = "SSL";
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
new file mode 100644
index 0000000..db736b0
--- /dev/null
+++ b/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
@@ -0,0 +1,395 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */ + +package org.apache.ranger.storm.client; + +import java.io.IOException; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.security.auth.Subject; +import javax.security.auth.login.AppConfigurationEntry; +import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag; +import javax.security.auth.login.LoginContext; +import javax.security.auth.login.LoginException; + +import org.apache.commons.io.FilenameUtils; +import org.apache.hadoop.security.KrbPasswordSaverLoginModule; +import org.apache.hadoop.security.authentication.util.KerberosUtil; +import org.apache.log4j.Logger; +import org.apache.ranger.hadoop.client.config.BaseClient; +import org.apache.ranger.hadoop.client.exceptions.HadoopException; +import org.apache.ranger.storm.client.json.model.Topology; +import org.apache.ranger.storm.client.json.model.TopologyListResponse; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import com.sun.jersey.api.client.Client; +import com.sun.jersey.api.client.ClientResponse; +import com.sun.jersey.api.client.WebResource; + +public class StormClient { + + public static final Logger LOG = Logger.getLogger(StormClient.class) ; + + private static final String EXPECTED_MIME_TYPE = "application/json"; + + private static final String TOPOLOGY_LIST_API_ENDPOINT = "/api/v1/topology/summary" ; + + + String stormUIUrl; + String userName; + String password; + + public StormClient(String aStormUIUrl, String aUserName, String aPassword) { + + this.stormUIUrl = aStormUIUrl; + this.userName = aUserName ; + this.password = aPassword; + + if (LOG.isDebugEnabled()) { + LOG.debug("Storm Client is build with url [" + aStormUIUrl + "] user: [" + aUserName + "], password: [" + "" + "]"); + } + + } + + public List<String> getTopologyList(final String topologyNameMatching) { + + LOG.debug("Getting Storm topology list for topologyNameMatching : " + + topologyNameMatching); + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
Check xa_portal.log for more info."; + + List<String> ret = new ArrayList<String>(); + + PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() { + @Override + public ArrayList<String> run() { + + ArrayList<String> lret = new ArrayList<String>(); + + String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT ; + + Client client = null ; + ClientResponse response = null ; + + try { + client = Client.create() ; + + WebResource webResource = client.resource(url); + + response = webResource.accept(EXPECTED_MIME_TYPE) + .get(ClientResponse.class); + + LOG.debug("getTopologyList():calling " + url); + + if (response != null) { + LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus()); + if (response.getStatus() == 200) { + String jsonString = response.getEntity(String.class); + Gson gson = new GsonBuilder().setPrettyPrinting().create(); + TopologyListResponse topologyListResponse = gson.fromJson(jsonString, TopologyListResponse.class); + if (topologyListResponse != null) { + if (topologyListResponse.getTopologyList() != null) { + for(Topology topology : topologyListResponse.getTopologyList()) { + String toplogyName = topology.getName() ; + LOG.debug("getTopologyList():Found topology " + toplogyName); + LOG.debug("getTopologyList():topology Name=[" + topology.getName() + "], topologyNameMatching=[" + topologyNameMatching + "]"); + if (toplogyName != null) { + if (topologyNameMatching == null || topologyNameMatching.isEmpty() || FilenameUtils.wildcardMatch(topology.getName(), topologyNameMatching + "*")) { + LOG.debug("getTopologyList():Adding topology " + toplogyName); + lret.add(toplogyName) ; + } + } + } + } + } + } else{ + LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL " + url + ", so returning null list"); + String jsonString = response.getEntity(String.class); + LOG.info(jsonString); + lret = null; + } + } else { + String msgDesc = "Unable to get a valid response for " + + "expected mime type : [" + EXPECTED_MIME_TYPE + + "] URL : " + url + " - got null response."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, + msgDesc + errMsg, null, null); + throw hdpException; + } + } catch (HadoopException he) { + throw he; + } catch (Throwable t) { + String msgDesc = "Exception while getting Storm TopologyList." + + " URL : " + url; + HadoopException hdpException = new HadoopException(msgDesc, + t); + LOG.error(msgDesc, t); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(t), msgDesc + errMsg, null, + null); + throw hdpException; + + } finally { + if (response != null) { + response.close(); + } + + if (client != null) { + client.destroy(); + } + + } + return lret ; + } + } ; + + try { + ret = executeUnderKerberos(this.userName, this.password, topologyListGetter) ; + } catch (IOException e) { + LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e) ; + } + + return ret; + } + + public static <T> T executeUnderKerberos(String userName, String password, + PrivilegedAction<T> action) throws IOException { + + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
Check xa_portal.log for more info."; + class MySecureClientLoginConfiguration extends + javax.security.auth.login.Configuration { + + private String userName; + private String password ; + + MySecureClientLoginConfiguration(String aUserName, + String password) { + this.userName = aUserName; + this.password = password; + } + + @Override + public AppConfigurationEntry[] getAppConfigurationEntry( + String appName) { + + Map<String, String> kerberosOptions = new HashMap<String, String>(); + kerberosOptions.put("principal", this.userName); + kerberosOptions.put("debug", "true"); + kerberosOptions.put("useKeyTab", "false"); + kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, this.userName); + kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, this.password); + kerberosOptions.put("doNotPrompt", "false"); + kerberosOptions.put("useFirstPass", "true"); + kerberosOptions.put("tryFirstPass", "false"); + kerberosOptions.put("storeKey", "true"); + kerberosOptions.put("refreshKrb5Config", "true"); + + AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = null; + AppConfigurationEntry KERBEROS_PWD_SAVER = null; + try { + KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry( + KerberosUtil.getKrb5LoginModuleName(), + AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, + kerberosOptions); + KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions); + + } catch (IllegalArgumentException e) { + String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList."; + HadoopException hdpException = new HadoopException(msgDesc, + e); + LOG.error(msgDesc, e); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(e), msgDesc + errMsg, null, + null); + throw hdpException; + } + + LOG.debug("getAppConfigurationEntry():" + kerberosOptions.get("principal")); + + return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN }; + } + + }; + + T ret = null; + + Subject subject = null; + LoginContext loginContext = null; + + try { + subject = new Subject(); + LOG.debug("executeUnderKerberos():user=" + userName + ",pass="); + LOG.debug("executeUnderKerberos():Creating config.."); + MySecureClientLoginConfiguration loginConf = new MySecureClientLoginConfiguration( + userName, password); + LOG.debug("executeUnderKerberos():Creating Context.."); + loginContext = new LoginContext("hadoop-keytab-kerberos", subject, + null, loginConf); + + LOG.debug("executeUnderKerberos():Logging in.."); + loginContext.login(); + + Subject loginSubj = loginContext.getSubject(); + + if (loginSubj != null) { + ret = Subject.doAs(loginSubj, action); + } + } catch (LoginException le) { + String msgDesc = "executeUnderKerberos: Login failure using given" + + " configuration parameters, username : `" + userName + "`."; + HadoopException hdpException = new HadoopException(msgDesc, le); + LOG.error(msgDesc, le); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(le), msgDesc + errMsg, null, null); + throw hdpException; + } catch (SecurityException se) { + String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList."; + HadoopException hdpException = new HadoopException(msgDesc, se); + LOG.error(msgDesc, se); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(se), msgDesc + errMsg, null, null); + throw hdpException; + + } finally { + if (loginContext != null) { + if (subject != null) { + try { + loginContext.logout(); + } catch 
(LoginException e) { + throw new IOException("logout failure", e); + } + } + } + } + + return ret; + } + + public static HashMap<String, Object> testConnection(String dataSource, + HashMap<String, String> connectionProperties) { + + List<String> strList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + boolean connectivityStatus = false; + HashMap<String, Object> responseData = new HashMap<String, Object>(); + + StormClient stormClient = getStormClient(dataSource, + connectionProperties); + strList = getStormResources(stormClient, ""); + + if (strList != null) { + connectivityStatus = true; + } + + if (connectivityStatus) { + String successMsg = "TestConnection Successful"; + BaseClient.generateResponseDataMap(connectivityStatus, successMsg, + successMsg, null, null, responseData); + } else { + String failureMsg = "Unable to retrieve any topologies using given parameters."; + BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, + failureMsg + errMsg, null, null, responseData); + } + + return responseData; + } + + public static StormClient getStormClient(String dataSourceName, + Map<String, String> configMap) { + StormClient stormClient = null; + LOG.debug("Getting StormClient for datasource: " + dataSourceName + + "configMap: " + configMap); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + if (configMap == null || configMap.isEmpty()) { + String msgDesc = "Could not connect as Connection ConfigMap is empty."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, msgDesc + + errMsg, null, null); + throw hdpException; + } else { + String stormUrl = configMap.get("nimbus.url"); + String stormAdminUser = configMap.get("username"); + String stormAdminPassword = configMap.get("password"); + stormClient = new StormClient(stormUrl, stormAdminUser, + stormAdminPassword); + } + return stormClient; + } + + public static List<String> getStormResources(final StormClient stormClient, + String topologyName) { + + List<String> resultList = new ArrayList<String>(); + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + + try { + if (stormClient == null) { + String msgDesc = "Unable to get Storm resources: StormClient is null."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc); + hdpException.generateResponseDataMap(false, msgDesc, msgDesc + + errMsg, null, null); + throw hdpException; + } + + if (topologyName != null) { + String finalTopologyNameMatching = (topologyName == null) ? 
"" + : topologyName.trim(); + resultList = stormClient + .getTopologyList(finalTopologyNameMatching); + if (resultList != null) { + LOG.debug("Returning list of " + resultList.size() + " topologies"); + } + } + } catch (HadoopException he) { + throw he; + } catch (Exception e) { + String msgDesc = "getStormResources: Unable to get Storm resources."; + LOG.error(msgDesc, e); + HadoopException hdpException = new HadoopException(msgDesc); + + hdpException.generateResponseDataMap(false, + BaseClient.getMessage(e), msgDesc + errMsg, null, null); + throw hdpException; + } + return resultList; + } + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java ---------------------------------------------------------------------- diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java new file mode 100644 index 0000000..4abf1e7 --- /dev/null +++ b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.storm.client.json.model; + +public class Topology { + private String id ; + private String name ; + private String status ; + + public String getId() { + return id; + } + public void setId(String id) { + this.id = id; + } + public String getName() { + return name; + } + public void setName(String name) { + this.name = name; + } + public String getStatus() { + return status; + } + public void setStatus(String status) { + this.status = status; + } + + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java ---------------------------------------------------------------------- diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java new file mode 100644 index 0000000..14d641d --- /dev/null +++ b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.storm.client.json.model;
+
+import java.util.List;
+
+import com.google.gson.annotations.SerializedName;
+
+public class TopologyListResponse {
+	@SerializedName("topologies")
+	private List<Topology> topologyList;
+
+	public List<Topology> getTopologyList() {
+		return topologyList;
+	}
+
+	public void setTopologyList(List<Topology> topologyList) {
+		this.topologyList = topologyList;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/scripts/run-audit-test.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-audit-test.sh b/lookup-client/src/scripts/run-audit-test.sh
index 698459f..9dd00eb 100755
--- a/lookup-client/src/scripts/run-audit-test.sh
+++ b/lookup-client/src/scripts/run-audit-test.sh
@@ -27,4 +27,4 @@ done
 
 export cp
-java -Xmx1024M -Xms1024M -cp "${cp}" com.xasecure.audit.test.TestEvents $*
+java -Xmx1024M -Xms1024M -cp "${cp}" org.apache.ranger.audit.test.TestEvents $*

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/scripts/run-hadoop-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hadoop-client.sh b/lookup-client/src/scripts/run-hadoop-client.sh
index afb4904..03aff46 100644
--- a/lookup-client/src/scripts/run-hadoop-client.sh
+++ b/lookup-client/src/scripts/run-hadoop-client.sh
@@ -17,7 +17,7 @@
 case $# in
 4 )
-	java -cp "./dist/*:./lib/hadoop/*:./conf:." com.xasecure.hadoop.client.HadoopFSTester "${1}" "${2}" "${3}" "${4}" ;;
+	java -cp "./dist/*:./lib/hadoop/*:./conf:." org.apache.ranger.hadoop.client.HadoopFSTester "${1}" "${2}" "${3}" "${4}" ;;
 * )
-	java -cp "./dist/*:./lib/hadoop/*:./conf:." com.xasecure.hadoop.client.HadoopFSTester ;;
+	java -cp "./dist/*:./lib/hadoop/*:./conf:." org.apache.ranger.hadoop.client.HadoopFSTester ;;
 esac
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/scripts/run-hbase-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hbase-client.sh b/lookup-client/src/scripts/run-hbase-client.sh
index 3790235..1c9cdb4 100644
--- a/lookup-client/src/scripts/run-hbase-client.sh
+++ b/lookup-client/src/scripts/run-hbase-client.sh
@@ -19,11 +19,11 @@ cp="./dist/*:./conf/:.:./lib/hadoop/*:./lib/hive/*:./lib/hbase/*"
 case $# in
 2 )
-java ${JOPTS} -cp "${cp}" com.xasecure.hbase.client.HBaseClientTester "${1}" "${2}" ;;
+java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester "${1}" "${2}" ;;
 3 )
-java ${JOPTS} -cp "${cp}" com.xasecure.hbase.client.HBaseClientTester "${1}" "${2}" "${3}" ;;
+java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester "${1}" "${2}" "${3}" ;;
 4 )
-java ${JOPTS} -cp "${cp}" com.xasecure.hbase.client.HBaseClientTester "${1}" "${2}" "${3}" "${4}" ;;
+java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester "${1}" "${2}" "${3}" "${4}" ;;
 * )
-java ${JOPTS} -cp "${cp}" com.xasecure.hbase.client.HBaseClientTester;;
+java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester;;
 esac
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/lookup-client/src/scripts/run-hive-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hive-client.sh b/lookup-client/src/scripts/run-hive-client.sh
index f50bbd6..4e80d10 100644
--- a/lookup-client/src/scripts/run-hive-client.sh
+++ b/lookup-client/src/scripts/run-hive-client.sh
@@ -17,13 +17,13 @@
 case $# in
 2 )
-	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." com.xasecure.hive.client.HiveClientTester "$1" "${2}" ;;
+	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" ;;
 3 )
-	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." com.xasecure.hive.client.HiveClientTester "$1" "${2}" "${3}" ;;
+	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" ;;
 4 )
-	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." com.xasecure.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" ;;
+	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" ;;
 5 )
-	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." com.xasecure.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" "${5}" ;;
+	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" "${5}" ;;
 * )
-	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." com.xasecure.hive.client.HiveClientTester ;;
+	java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:." 
org.apache.ranger.hive.client.HiveClientTester ;; esac http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/.gitignore ---------------------------------------------------------------------- diff --git a/security-admin/.gitignore b/security-admin/.gitignore index 0f63015..de3a426 100644 --- a/security-admin/.gitignore +++ b/security-admin/.gitignore @@ -1,2 +1,3 @@ /target/ /bin/ +/bin/ http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/.settings/org.eclipse.wst.common.component ---------------------------------------------------------------------- diff --git a/security-admin/.settings/org.eclipse.wst.common.component b/security-admin/.settings/org.eclipse.wst.common.component index 5689098..4e51866 100644 --- a/security-admin/.settings/org.eclipse.wst.common.component +++ b/security-admin/.settings/org.eclipse.wst.common.component @@ -4,7 +4,6 @@ <wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/> <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/> <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/resources"/> - <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/test/java"/> <dependent-module archiveName="lookup-client-0.4.0.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/lookup-client/lookup-client"> <dependency-type>uses</dependency-type> </dependent-module> http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/scripts/setup.sh ---------------------------------------------------------------------- diff --git a/security-admin/scripts/setup.sh b/security-admin/scripts/setup.sh index dc5dbd3..bc241e2 100755 --- a/security-admin/scripts/setup.sh +++ b/security-admin/scripts/setup.sh @@ -792,7 +792,7 @@ update_properties() { then mkdir -p `dirname "${keystore}"` - $JAVA_HOME/bin/java -cp "cred/lib/*" com.hortonworks.credentialapi.buildks create "$db_password_alias" -value "$db_password" -provider jceks://file$keystore + $JAVA_HOME/bin/java -cp "cred/lib/*" org.apache.ranger.credentialapi.buildks create "$db_password_alias" -value "$db_password" -provider jceks://file$keystore propertyName=xaDB.jdbc.credential.alias newPropertyValue="${db_password_alias}" @@ -829,7 +829,7 @@ update_properties() { if [ "${keystore}" != "" ] then - $JAVA_HOME/bin/java -cp "cred/lib/*" com.hortonworks.credentialapi.buildks create "$audit_db_password_alias" -value "$audit_db_password" -provider jceks://file$keystore + $JAVA_HOME/bin/java -cp "cred/lib/*" org.apache.ranger.credentialapi.buildks create "$audit_db_password_alias" -value "$audit_db_password" -provider jceks://file$keystore propertyName=auditDB.jdbc.credential.alias newPropertyValue="${audit_db_password_alias}" @@ -1239,10 +1239,10 @@ execute_java_patches(){ dt=`date '+%s'` tempFile=/tmp/sql_${dt}_$$.sql mysqlexec="${SQL_COMMAND_INVOKER} -u ${db_root_user} --password="${db_root_password}" -h ${DB_HOST} ${db_name}" - javaFiles=`ls -1 $app_home/WEB-INF/classes/com/xasecure/patch/Patch*.class 2> /dev/null | awk -F/ '{ print $NF }' | awk -F_J '{ print $2, $0 }' | sort -k1 -n | awk '{ printf("%s\n",$2) ; }'` + javaFiles=`ls -1 $app_home/WEB-INF/classes/org/apache/ranger/patch/Patch*.class 2> /dev/null | awk -F/ '{ print $NF }' | awk -F_J '{ print $2, $0 }' | sort -k1 -n | awk '{ printf("%s\n",$2) ; }'` for javaPatch in ${javaFiles} do - if test -f "$app_home/WEB-INF/classes/com/xasecure/patch/$javaPatch"; then + if test -f 
"$app_home/WEB-INF/classes/org/apache/ranger/patch/$javaPatch"; then className=$(basename "$javaPatch" .class) version=`echo ${className} | awk -F'_' '{ print $2 }'` if [ "${version}" != "" ] @@ -1252,7 +1252,7 @@ execute_java_patches(){ if [ ${c} -eq 0 ] then log "[I] patch ${javaPatch} is being applied.."; - msg=`$JAVA_HOME/bin/java -cp "$app_home/WEB-INF/classes/conf:$app_home/WEB-INF/classes/lib/*:$app_home/WEB-INF/:$app_home/META-INF/:$app_home/WEB-INF/lib/*:$app_home/WEB-INF/classes/:$app_home/WEB-INF/classes/META-INF/" com.xasecure.patch.${className}` + msg=`$JAVA_HOME/bin/java -cp "$app_home/WEB-INF/classes/conf:$app_home/WEB-INF/classes/lib/*:$app_home/WEB-INF/:$app_home/META-INF/:$app_home/WEB-INF/lib/*:$app_home/WEB-INF/classes/:$app_home/WEB-INF/classes/META-INF/" org.apache.ranger.patch.${className}` check_ret_status $? "Unable to apply patch:$javaPatch. $msg" touch ${tempFile} echo >> ${tempFile} @@ -1272,10 +1272,10 @@ execute_java_patches(){ then dt=`date '+%s'` tempFile=/tmp/sql_${dt}_$$.sql - javaFiles=`ls -1 $app_home/WEB-INF/classes/com/xasecure/patch/Patch*.class 2> /dev/null | awk -F/ '{ print $NF }' | awk -F_J '{ print $2, $0 }' | sort -k1 -n | awk '{ printf("%s\n",$2) ; }'` + javaFiles=`ls -1 $app_home/WEB-INF/classes/org/apache/ranger/patch/Patch*.class 2> /dev/null | awk -F/ '{ print $NF }' | awk -F_J '{ print $2, $0 }' | sort -k1 -n | awk '{ printf("%s\n",$2) ; }'` for javaPatch in ${javaFiles} do - if test -f "$app_home/WEB-INF/classes/com/xasecure/patch/$javaPatch"; then + if test -f "$app_home/WEB-INF/classes/org/apache/ranger/patch/$javaPatch"; then className=$(basename "$javaPatch" .class) version=`echo ${className} | awk -F'_' '{ print $2 }'` if [ "${version}" != "" ] @@ -1285,7 +1285,7 @@ execute_java_patches(){ if test "${result2#*$version}" == "$result2" then log "[I] patch ${javaPatch} is being applied.."; - msg=`$JAVA_HOME/bin/java -cp "$app_home/WEB-INF/classes/conf:$app_home/WEB-INF/classes/lib/*:$app_home/WEB-INF/:$app_home/META-INF/:$app_home/WEB-INF/lib/*:$app_home/WEB-INF/classes/:$app_home/WEB-INF/classes/META-INF/" com.xasecure.patch.${className}` + msg=`$JAVA_HOME/bin/java -cp "$app_home/WEB-INF/classes/conf:$app_home/WEB-INF/classes/lib/*:$app_home/WEB-INF/:$app_home/META-INF/:$app_home/WEB-INF/lib/*:$app_home/WEB-INF/classes/:$app_home/WEB-INF/classes/META-INF/" org.apache.ranger.patch.${className}` check_ret_status $? "Unable to apply patch:$javaPatch. 
$msg" touch ${tempFile} echo >> ${tempFile} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/src/bin/ranger_install.py ---------------------------------------------------------------------- diff --git a/security-admin/src/bin/ranger_install.py b/security-admin/src/bin/ranger_install.py index 6b1e324..75b5931 100644 --- a/security-admin/src/bin/ranger_install.py +++ b/security-admin/src/bin/ranger_install.py @@ -1021,13 +1021,13 @@ def call_keystore(libpath,aliasKey,aliasValue , filepath,getorcreate): finalLibPath = libpath.replace('\\','/').replace('//','/') finalFilePath = 'jceks://file/'+filepath.replace('\\','/').replace('//','/') if getorcreate == 'create': - commandtorun = ['java', '-cp', finalLibPath, 'com.hortonworks.credentialapi.buildks' ,'create', aliasKey, '-value', aliasValue, '-provider',finalFilePath] + commandtorun = ['java', '-cp', finalLibPath, 'org.apache.ranger.credentialapi.buildks' ,'create', aliasKey, '-value', aliasValue, '-provider',finalFilePath] p = Popen(commandtorun,stdin=PIPE, stdout=PIPE, stderr=PIPE) output, error = p.communicate() statuscode = p.returncode return statuscode elif getorcreate == 'get': - commandtorun = ['java', '-cp', finalLibPath, 'com.hortonworks.credentialapi.buildks' ,'get', aliasKey, '-provider',finalFilePath] + commandtorun = ['java', '-cp', finalLibPath, 'org.apache.ranger.credentialapi.buildks' ,'get', aliasKey, '-provider',finalFilePath] p = Popen(commandtorun,stdin=PIPE, stdout=PIPE, stderr=PIPE) output, error = p.communicate() statuscode = p.returncode http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/src/bin/ranger_usersync.py ---------------------------------------------------------------------- diff --git a/security-admin/src/bin/ranger_usersync.py b/security-admin/src/bin/ranger_usersync.py index dcf50ef..4309589 100644 --- a/security-admin/src/bin/ranger_usersync.py +++ b/security-admin/src/bin/ranger_usersync.py @@ -101,7 +101,7 @@ if service_entry: init_variables() jdk_options = get_jdk_options() class_path = get_ranger_classpath() - java_class = 'com.xasecure.authentication.UnixAuthenticationService' + java_class = 'org.apache.ranger.authentication.UnixAuthenticationService' class_arguments = '' dom = getDOMImplementation() http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/src/bin/service_start.py ---------------------------------------------------------------------- diff --git a/security-admin/src/bin/service_start.py b/security-admin/src/bin/service_start.py index 446bc54..ad2caa2 100644 --- a/security-admin/src/bin/service_start.py +++ b/security-admin/src/bin/service_start.py @@ -32,7 +32,7 @@ if service_entry: ranger_install.run_setup(cmd) jdk_options = ranger_install.get_jdk_options() class_path = ranger_install.get_ranger_classpath() - java_class = 'com.xasecure.server.tomcat.EmbededServer' + java_class = 'org.apache.ranger.server.tomcat.EmbededServer' class_arguments = '' from xml.dom.minidom import getDOMImplementation http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/src/main/java/com/xasecure/authentication/unix/jaas/RoleUserAuthorityGranter.java ---------------------------------------------------------------------- diff --git a/security-admin/src/main/java/com/xasecure/authentication/unix/jaas/RoleUserAuthorityGranter.java b/security-admin/src/main/java/com/xasecure/authentication/unix/jaas/RoleUserAuthorityGranter.java deleted file mode 100644 index b025568..0000000 --- 
a/security-admin/src/main/java/com/xasecure/authentication/unix/jaas/RoleUserAuthorityGranter.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - - package com.xasecure.authentication.unix.jaas; - -import java.security.Principal; -import java.util.Collections; -import java.util.Set; - -import org.springframework.security.authentication.jaas.AuthorityGranter; - -public class RoleUserAuthorityGranter implements AuthorityGranter { - - @Override - public Set<String> grant(Principal principal) { - if (principal instanceof UnixGroupPrincipal) { - Collections.singleton(principal.getName()); - } - else { - Collections.singleton("ROLE_USER"); - } - return null; - } -} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/security-admin/src/main/java/com/xasecure/biz/AssetConnectionMgr.java ---------------------------------------------------------------------- diff --git a/security-admin/src/main/java/com/xasecure/biz/AssetConnectionMgr.java b/security-admin/src/main/java/com/xasecure/biz/AssetConnectionMgr.java deleted file mode 100644 index 7df1321..0000000 --- a/security-admin/src/main/java/com/xasecure/biz/AssetConnectionMgr.java +++ /dev/null @@ -1,425 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - - package com.xasecure.biz; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.concurrent.TimeUnit; - -import org.apache.log4j.Logger; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Scope; -import org.springframework.stereotype.Component; - -import com.xasecure.common.AppConstants; -import com.xasecure.common.JSONUtil; -import com.xasecure.common.StringUtil; -import com.xasecure.common.TimedEventUtil; -import com.xasecure.db.XADaoManager; -import com.xasecure.entity.XXAsset; -import com.xasecure.hadoop.client.HadoopFS; -import com.xasecure.hbase.client.HBaseClient; -import com.xasecure.hive.client.HiveClient; -import com.xasecure.knox.client.KnoxClient; -import com.xasecure.storm.client.StormClient; -import com.xasecure.service.XAssetService; -import com.xasecure.view.VXAsset; - -@Component -@Scope("singleton") -public class AssetConnectionMgr { - - private static Logger logger = Logger.getLogger(AssetConnectionMgr.class); - - protected HashMap<String, HadoopFS> hadoopConnectionCache; - protected HashMap<String, HiveClient> hiveConnectionCache; - protected HashMap<String, HBaseClient> hbaseConnectionCache; - - protected HashMap<String, Boolean> repoConnectStatusMap; - - @Autowired - protected JSONUtil jsonUtil; - - @Autowired - protected StringUtil stringUtil; - - @Autowired - protected XADaoManager xADaoManager; - - @Autowired - XAssetService xAssetService; - - public AssetConnectionMgr(){ - hadoopConnectionCache = new HashMap<String, HadoopFS>(); - hiveConnectionCache = new HashMap<String, HiveClient>(); - hbaseConnectionCache = new HashMap<String, HBaseClient>(); - repoConnectStatusMap = new HashMap<String, Boolean>(); - } - - public HadoopFS getHadoopConnection(final String dataSourceName) { - HadoopFS hadoopFS = null; - XXAsset asset = xADaoManager.getXXAsset().findByAssetName(dataSourceName); - if (asset != null) { - // get it from the cache - synchronized (hadoopConnectionCache) { - hadoopFS = hadoopConnectionCache.get(asset.getName()); - if (hadoopFS == null) { - // if it doesn't exist in cache then create the connection - String config = asset.getConfig(); - if(!stringUtil.isEmpty(config)){ - config=xAssetService.getConfigWithDecryptedPassword(config); - } - // FIXME remove this once we start using putting config for - // default asset "hadoopdev" (should come from properties) - if (stringUtil.isEmpty(config) - && asset.getName().equals("hadoopdev")) { - - final Callable<HadoopFS> connectHDFS = new Callable<HadoopFS>() { - @Override - public HadoopFS call() throws Exception { - return new HadoopFS(dataSourceName); - } - }; - - try { - hadoopFS = TimedEventUtil.timedTask(connectHDFS, 10, TimeUnit.SECONDS); - } catch(Exception e){ - logger.error("Error establishing connection for HDFS repository : " - + dataSourceName, e); - } - - } else if (!stringUtil.isEmpty(config)) { - final HashMap<String, String> configMap = (HashMap<String, String>) jsonUtil - .jsonToMap(config); - final String assetName = asset.getName(); - - final Callable<HadoopFS> connectHDFS = new Callable<HadoopFS>() { - @Override - public HadoopFS call() throws Exception { - return new HadoopFS(assetName, configMap); - } - }; - - try { - hadoopFS = TimedEventUtil.timedTask(connectHDFS, 5, TimeUnit.SECONDS); - } catch(Exception e){ - logger.error("Error establishing connection for HDFS repository : " - + dataSourceName + " using configuration : " +config, e); - } - // 
put it into the cache - } else { - logger.error("Connection Config not defined for asset :" - + asset.getName(), new Throwable()); - } - hadoopConnectionCache.put(asset.getName(), hadoopFS); - repoConnectStatusMap.put(asset.getName(), true); - } else { - List<String> testConnect = hadoopFS.listFiles("/", "*"); - if(testConnect == null){ - hadoopConnectionCache.remove(dataSourceName); - hadoopFS = getHadoopConnection(dataSourceName); - } - } - } - } else { - logger.error("Asset not found with name "+dataSourceName, new Throwable()); - } - - return hadoopFS; - } - - public HiveClient getHiveConnection(final String dataSourceName) { - HiveClient hiveClient = null; - XXAsset asset = xADaoManager.getXXAsset().findByAssetName(dataSourceName); - if (asset != null) { - // get it from the cache - synchronized (hiveConnectionCache) { - hiveClient = hiveConnectionCache.get(asset.getName()); - if (hiveClient == null) { - String config = asset.getConfig(); - if (!stringUtil.isEmpty(config)) { - config=xAssetService.getConfigWithDecryptedPassword(config); - final HashMap<String, String> configMap = (HashMap<String, String>) jsonUtil - .jsonToMap(config); - - final Callable<HiveClient> connectHive = new Callable<HiveClient>() { - @Override - public HiveClient call() throws Exception { - return new HiveClient(dataSourceName, configMap); - } - }; - try { - hiveClient = TimedEventUtil.timedTask(connectHive, 5, TimeUnit.SECONDS); - } catch(Exception e){ - logger.error("Error connecting hive repository : "+ - dataSourceName +" using config : "+ config, e); - } - hiveConnectionCache.put(asset.getName(), hiveClient); - repoConnectStatusMap.put(asset.getName(), true); - } else { - logger.error("Connection Config not defined for asset :" - + asset.getName(), new Throwable()); - } - } else { - try { - List<String> testConnect = hiveClient.getDatabaseList("*"); - } catch(Exception e) { - hiveConnectionCache.remove(dataSourceName); - hiveClient = getHiveConnection(dataSourceName); - } - } - } - } else { - logger.error("Asset not found with name "+dataSourceName, new Throwable()); - } - return hiveClient; - } - - public KnoxClient getKnoxClient(String dataSourceName) { - KnoxClient knoxClient = null; - logger.debug("Getting knoxClient for datasource: " + dataSourceName); - XXAsset asset = xADaoManager.getXXAsset().findByAssetName(dataSourceName); - if (asset == null) { - logger.error("Asset not found with name " + dataSourceName, new Throwable()); - } else { - knoxClient = getKnoxClient(asset); - } - return knoxClient; - } - - public KnoxClient getKnoxClient(XXAsset asset) { - KnoxClient knoxClient = null; - if (asset == null) { - logger.error("Asset is null", new Throwable()); - } else { - String config = asset.getConfig(); - if(!stringUtil.isEmpty(config)){ - config=xAssetService.getConfigWithDecryptedPassword(config); - } - knoxClient = getKnoxClientByConfig(config); - } - return knoxClient; - } - - public KnoxClient getKnoxClientByConfig(String config) { - KnoxClient knoxClient = null; - if (config == null || config.trim().isEmpty()) { - logger.error("Connection Config is empty"); - - } else { - final HashMap<String, String> configMap = (HashMap<String, String>) jsonUtil - .jsonToMap(config); - String knoxUrl = configMap.get("knox.url"); - String knoxAdminUser = configMap.get("username"); - String knoxAdminPassword = configMap.get("password"); - knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword); - } - return knoxClient; - } - - public KnoxClient getKnoxClient(String dataSourceName, - 
-    public static KnoxClient getKnoxClient(final String knoxUrl, String knoxAdminUser, String knoxAdminPassword) {
-        KnoxClient knoxClient = null;
-        if (knoxUrl == null || knoxUrl.isEmpty()) {
-            logger.error("Can not create KnoxClient: knoxUrl is empty");
-        } else if (knoxAdminUser == null || knoxAdminUser.isEmpty()) {
-            logger.error("Can not create KnoxClient: knoxAdminUser is empty");
-        } else if (knoxAdminPassword == null || knoxAdminPassword.isEmpty()) {
-            logger.error("Can not create KnoxClient: knoxAdminPassword is empty");
-        } else {
-            knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword);
-        }
-        return knoxClient;
-    }
-
-    public HBaseClient getHBaseConnection(final String dataSourceName) {
-        HBaseClient client = null;
-        XXAsset asset = xADaoManager.getXXAsset().findByAssetName(
-                dataSourceName);
-        if (asset != null) {
-            // get it from the cache
-            synchronized (hbaseConnectionCache) {
-                client = hbaseConnectionCache.get(asset.getName());
-                if (client == null) {
-                    // if it doesn't exist in cache then create the connection
-                    String config = asset.getConfig();
-                    if(!stringUtil.isEmpty(config)){
-                        config=xAssetService.getConfigWithDecryptedPassword(config);
-                    }
-                    // FIXME remove this once we start putting config for
-                    // default asset "dev-hive" (should come from properties)
-                    if (stringUtil.isEmpty(config)
-                            && asset.getName().equals("hbase")) {
-
-                        final Callable<HBaseClient> connectHBase = new Callable<HBaseClient>() {
-                            @Override
-                            public HBaseClient call() throws Exception {
-                                HBaseClient hBaseClient=null;
-                                if(dataSourceName!=null){
-                                    try{
-                                        hBaseClient=new HBaseClient(dataSourceName);
-                                    }catch(Exception ex){
-                                        logger.error("Error connecting HBase repository : ", ex);
-                                    }
-                                }
-                                return hBaseClient;
-                            }
-                        };
-
-                        try {
-                            if(connectHBase!=null){
-                                client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS);
-                            }
-                        } catch(Exception e){
-                            logger.error("Error connecting HBase repository : " + dataSourceName);
-                        }
-                    } else if (!stringUtil.isEmpty(config)) {
-                        final HashMap<String, String> configMap = (HashMap<String, String>) jsonUtil
-                                .jsonToMap(config);
-
-                        final Callable<HBaseClient> connectHBase = new Callable<HBaseClient>() {
-                            @Override
-                            public HBaseClient call() throws Exception {
-                                HBaseClient hBaseClient=null;
-                                if(dataSourceName!=null && configMap!=null){
-                                    try{
-                                        hBaseClient=new HBaseClient(dataSourceName,configMap);
-                                    }catch(Exception ex){
-                                        logger.error("Error connecting HBase repository : ", ex);
-                                    }
-                                }
-                                return hBaseClient;
-                            }
-                        };
-
-                        try {
-                            if(connectHBase!=null){
-                                client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS);
-                            }
-                        } catch(Exception e){
-                            logger.error("Error connecting HBase repository : "+
-                                    dataSourceName +" using config : "+ config);
-                        }
-                    } else {
-                        logger.error("Connection Config not defined for asset :"
-                                + asset.getName(), new Throwable());
-                    }
-                    if(client!=null){
-                        hbaseConnectionCache.put(asset.getName(), client);
-                    }
-                } else {
-                    List<String> testConnect = client.getTableList(".\\*");
-                    if(testConnect == null){
-                        hbaseConnectionCache.remove(dataSourceName);
-                        client = getHBaseConnection(dataSourceName);
-                    }
-                }
-                repoConnectStatusMap.put(asset.getName(), true);
-            }
-        } else {
-            logger.error("Asset not found with name " + dataSourceName,
-                    new Throwable());
-        }
-
-        return client;
-    }
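All three connection getters share the same cache discipline: probe a cached client with a cheap call (listFiles, getDatabaseList, getTableList), and on a null or failing probe evict the entry and recurse to rebuild it. Below is a generic sketch of that idiom; the getOrRefresh and Probe names are invented for illustration, with the factory standing in for the timed connection creation above.

    import java.util.Map;
    import java.util.concurrent.Callable;

    public class ConnectionCacheSketch {
        // Generic shape of the probe-evict-rebuild idiom used by the getters above.
        public static <C> C getOrRefresh(Map<String, C> cache, String key,
                Probe<C> isAlive, Callable<C> factory) {
            synchronized (cache) {
                C conn = cache.get(key);
                if (conn != null && isAlive.test(conn)) {
                    return conn; // cached client is still healthy
                }
                cache.remove(key); // evict the stale or missing entry
                try {
                    conn = factory.call(); // e.g. wrapped in TimedEventUtil.timedTask
                } catch (Exception e) {
                    return null; // mirrors the log-and-return-null style above
                }
                if (conn != null) {
                    cache.put(key, conn);
                }
                return conn;
            }
        }

        // Minimal callback interface so the sketch stays pre-Java-8 friendly,
        // matching the anonymous-class style of the surrounding code.
        public interface Probe<C> {
            boolean test(C conn);
        }
    }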
-    public boolean destroyConnection(VXAsset asset) {
-        boolean result = false;
-        if (asset != null) {
-            if(asset.getAssetType() == AppConstants.ASSET_HDFS) {
-                synchronized (hadoopConnectionCache) {
-                    @SuppressWarnings("unused")
-                    HadoopFS hadoopFS = hadoopConnectionCache.get(asset.getName());
-                    // TODO need a way to close the connection
-                    hadoopConnectionCache.remove(asset.getName());
-                    repoConnectStatusMap.remove(asset.getName());
-                }
-            } else if(asset.getAssetType() == AppConstants.ASSET_HIVE) {
-                synchronized (hiveConnectionCache) {
-                    HiveClient hiveClient = hiveConnectionCache.get(asset.getName());
-                    if(hiveClient != null) {
-                        hiveClient.close();
-                    }
-                    hiveConnectionCache.remove(asset.getName());
-                    repoConnectStatusMap.remove(asset.getName());
-                }
-            } else if (asset.getAssetType() == AppConstants.ASSET_HBASE) {
-                synchronized (hbaseConnectionCache) {
-                    @SuppressWarnings("unused")
-                    HBaseClient hBaseClient = hbaseConnectionCache.get(asset
-                            .getName());
-                    // TODO need a way to close the connection
-                    hbaseConnectionCache.remove(asset.getName());
-                    repoConnectStatusMap.remove(asset.getName());
-                }
-            }
-            result = true;
-        }
-        return result;
-    }
-
-    public HadoopFS resetHadoopConnection(final String dataSourceName){
-        hadoopConnectionCache.remove(dataSourceName);
-        return getHadoopConnection(dataSourceName);
-    }
-
-    public static StormClient getStormClient(final String stormUIURL, String userName, String password) {
-        StormClient stormClient = null;
-        if (stormUIURL == null || stormUIURL.isEmpty()) {
-            logger.error("Can not create StormClient: stormUIURL is empty");
-        } else if (userName == null || userName.isEmpty()) {
-            logger.error("Can not create StormClient: userName is empty");
-        } else if (password == null || password.isEmpty()) {
-            logger.error("Can not create StormClient: password is empty");
-        } else {
-            stormClient = new StormClient(stormUIURL, userName, password);
-        }
-        return stormClient;
-    }
-}
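Because the caches key clients by asset name and never expire entries on their own, callers are responsible for eviction whenever a repository definition is edited or deleted. The following is a hypothetical caller (class name and wiring invented, assuming it lives alongside com.xasecure.biz.AssetConnectionMgr) showing the intended use of destroyConnection and resetHadoopConnection.

    package com.xasecure.biz;

    import com.xasecure.common.AppConstants;
    import com.xasecure.view.VXAsset;

    public class RepoUpdateHandler {
        private final AssetConnectionMgr connectionMgr;

        public RepoUpdateHandler(AssetConnectionMgr connectionMgr) {
            this.connectionMgr = connectionMgr;
        }

        // Drop the stale cached client first; the next lookup (or the explicit
        // reset below, for HDFS) rebuilds it from the updated config.
        public void onRepoConfigChanged(VXAsset asset) {
            connectionMgr.destroyConnection(asset);
            if (asset.getAssetType() == AppConstants.ASSET_HDFS) {
                connectionMgr.resetHadoopConnection(asset.getName());
            }
        }
    }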
