/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.ranger.services.hive.client;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

import org.apache.log4j.Logger;
import org.apache.ranger.plugin.service.ResourceLookupContext;
import org.apache.ranger.plugin.util.TimedEventUtil;

/**
 * Static helpers used by the Hive service plugin to validate connection
 * configuration and to auto-complete Hive resource names (database, table,
 * column) in the policy UI.
 */
public class HiveResourceMgr {

    public static final Logger LOG = Logger.getLogger(HiveResourceMgr.class);

    private static final String DATABASE = "database";
    private static final String TABLE    = "table";
    private static final String UDF      = "udf";
    private static final String COLUMN   = "column";

    // Upper bound for a single lookup call, in seconds.
    private static final long LOOKUP_TIMEOUT_SECS = 5;

    /**
     * Validates the Hive connection configuration by delegating to
     * {@link HiveClient#testConnection}.
     *
     * @param serviceName name of the Ranger service being tested
     * @param configs     connection properties (jdbc url, credentials, ...)
     * @return response-data map describing success/failure of the connection
     * @throws Exception if the connection attempt itself fails
     */
    public static HashMap<String, Object> testConnection(String serviceName, Map<String, String> configs) throws Exception {
        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HiveResourceMgr.testConnection ServiceName: " + serviceName + " Configs: " + configs);
        }

        HashMap<String, Object> ret;

        try {
            ret = HiveClient.testConnection(serviceName, configs);
        } catch (Exception e) {
            LOG.error("<== HiveResourceMgr.testConnection Error: " + e);
            throw e;
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HiveResourceMgr.testConnection Result : " + ret);
        }

        return ret;
    }

    /**
     * Looks up Hive resource names matching the user's partial input.
     *
     * <p>The {@code context} carries the resource level being completed
     * ("database", "table", "udf" or "column"), the user's typed prefix, and a
     * map of resources already selected on the policy form; the latter is used
     * both to scope the lookup (e.g. which databases to list tables from) and
     * to exclude values already chosen.</p>
     *
     * @param serviceName Ranger service whose Hive connection should be used
     * @param configs     connection properties for the service
     * @param context     lookup context (resource name, user input, selected resources)
     * @return matching resource names, or {@code null} when no lookup could be
     *         performed (bad input, no connection, lookup error or timeout)
     * @throws Exception never in practice; lookup failures are logged and
     *         swallowed so that policy creation is not blocked
     */
    public static List<String> getHiveResources(String serviceName, Map<String, String> configs, ResourceLookupContext context) throws Exception {
        String                    userInput   = context.getUserInput();
        String                    resource    = context.getResourceName();
        Map<String, List<String>> resourceMap = context.getResources();

        List<String> resultList   = null;
        List<String> databaseList = null;
        List<String> tableList    = null;
        List<String> udfList      = null;
        List<String> columnList   = null;
        String       databaseName = null;
        String       tableName    = null;
        String       udfName      = null;
        String       columnName   = null;

        if (LOG.isDebugEnabled()) {
            LOG.debug("==> HiveResourceMgr.getHiveResources() UserInput: \"" + userInput + "\" resource : " + resource + " resourceMap: " + resourceMap);
        }

        if (userInput != null && resource != null) {
            // Lists of resources already selected on the policy form; used to
            // scope the lookup and exclude already-chosen values.
            boolean hasResourceMap = resourceMap != null && !resourceMap.isEmpty()
                    && (resourceMap.get(DATABASE) != null || resourceMap.get(TABLE) != null
                     || resourceMap.get(UDF) != null || resourceMap.get(COLUMN) != null);

            switch (resource.trim().toLowerCase()) {
                case DATABASE:
                    databaseName = userInput;
                    databaseList = hasResourceMap ? resourceMap.get(DATABASE) : null;
                    break;
                case TABLE:
                    tableName    = userInput;
                    // Fix: also pick up the selected databases so the table
                    // lookup is scoped to them (originally only tableList was
                    // read, and the lookup below never ran for tables).
                    databaseList = hasResourceMap ? resourceMap.get(DATABASE) : null;
                    tableList    = hasResourceMap ? resourceMap.get(TABLE)    : null;
                    break;
                case UDF:
                    // NOTE(review): HiveClient exposes no UDF lookup here, so
                    // udfName/udfList are collected but no lookup is performed
                    // (same net behavior as the original code).
                    udfName = userInput;
                    udfList = hasResourceMap ? resourceMap.get(UDF) : null;
                    break;
                case COLUMN:
                    columnName   = userInput;
                    databaseList = hasResourceMap ? resourceMap.get(DATABASE) : null;
                    tableList    = hasResourceMap ? resourceMap.get(TABLE)    : null;
                    columnList   = hasResourceMap ? resourceMap.get(COLUMN)   : null;
                    break;
                default:
                    break;
            }
        }

        if (serviceName != null && userInput != null) {
            try {
                if (LOG.isDebugEnabled()) {
                    LOG.debug("==> HiveResourceMgr.getHiveResources() UserInput: " + userInput + " configs: " + configs + " context: " + context);
                }

                final HiveClient hiveClient = new HiveConnectionMgr().getHiveConnection(serviceName, configs);

                Callable<List<String>> callableObj = null;

                final List<String> finalDatabaseList = databaseList;
                final List<String> finalTableList    = tableList;
                final List<String> finalColumnList   = columnList;

                if (hiveClient != null) {
                    // Trailing '*' turns the user's prefix into a match pattern.
                    // Dispatch on the most specific name that was populated
                    // above. Fix: the original required databaseName to be
                    // non-null for ALL lookups, so table/column autocomplete
                    // could never execute.
                    if (columnName != null && !columnName.isEmpty()) {
                        final String finalColName = columnName + "*";

                        callableObj = new Callable<List<String>>() {
                            @Override
                            public List<String> call() {
                                return hiveClient.getColumnList(finalColName, finalDatabaseList, finalTableList, finalColumnList);
                            }
                        };
                    } else if (tableName != null && !tableName.isEmpty()) {
                        final String finalTableName = tableName + "*";

                        callableObj = new Callable<List<String>>() {
                            @Override
                            public List<String> call() {
                                return hiveClient.getTableList(finalTableName, finalDatabaseList, finalTableList);
                            }
                        };
                    } else if (databaseName != null && !databaseName.isEmpty()) {
                        final String finalDbName = databaseName + "*";

                        callableObj = new Callable<List<String>>() {
                            @Override
                            public List<String> call() {
                                return hiveClient.getDatabaseList(finalDbName, finalDatabaseList);
                            }
                        };
                    }

                    if (callableObj != null) {
                        // HiveClient is not thread-safe; serialize access and
                        // bound the metastore round-trip.
                        synchronized (hiveClient) {
                            resultList = TimedEventUtil.timedTask(callableObj, LOOKUP_TIMEOUT_SECS, TimeUnit.SECONDS);
                        }
                    }
                }
            } catch (Exception e) {
                // Deliberately swallowed: a failed lookup must not block
                // saving the policy; the UI simply gets no suggestions.
                LOG.error("Unable to get hive resources.", e);
            }
        }

        if (LOG.isDebugEnabled()) {
            LOG.debug("<== HiveResourceMgr.getHiveResources() databaseName: " + databaseName + " tableName: " + tableName + " udfName: " + udfName + " columnName: " + columnName + " Result: " + resultList);
        }

        return resultList;
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.ranger.services.hive.client;

import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;

/**
 * Command-line smoke tester for {@link HiveClient}.
 *
 * <p>Usage: {@code dataSourceName propertyFile [databaseName [tableName [columnName]]]}.
 * With 3 args it lists databases matching the filter, with 4 args tables,
 * with 5 args columns.</p>
 */
public class HiveClientTester {

    public static void main(String[] args) throws Throwable {

        // At least a data source name, a property file and one lookup filter
        // are required for anything useful to happen.
        if (args.length <= 2) {
            System.err.println("USAGE: java " + HiveClientTester.class.getName()
                    + " dataSourceName propertyFile <databaseName> <tableName> <columnName>");
            System.exit(1);
        }

        HiveClient hc = null;

        try {
            Properties conf = new Properties();

            // Fix: fail with a clear message (instead of an opaque NPE) when
            // the property file is not on the classpath, and close the stream.
            InputStream in = HiveClientTester.class.getClassLoader().getResourceAsStream(args[1]);
            if (in == null) {
                System.err.println("Unable to load property file [" + args[1] + "] from the classpath");
                System.exit(1);
            }
            try {
                conf.load(in);
            } finally {
                in.close();
            }

            // HiveClient expects a String->String map; Properties guarantees
            // string keys/values via stringPropertyNames(), so no raw casts.
            HashMap<String, String> prop = new HashMap<String, String>();
            for (String key : conf.stringPropertyNames()) {
                prop.put(key, conf.getProperty(key));
            }

            hc = new HiveClient(args[0], prop);

            if (args.length == 3) {
                List<String> dbList = hc.getDatabaseList(args[2], null);
                if (dbList.size() == 0) {
                    System.out.println("No database found with db filter [" + args[2] + "]");
                } else {
                    for (String str : dbList) {
                        System.out.println("database: " + str);
                    }
                }
            } else if (args.length == 4) {
                List<String> tableList = hc.getTableList(args[3], null, null);
                if (tableList.size() == 0) {
                    System.out.println("No tables found under database[" + args[2] + "] with table filter [" + args[3] + "]");
                } else {
                    for (String str : tableList) {
                        System.out.println("Table: " + str);
                    }
                }
            } else if (args.length == 5) {
                List<String> columnList = hc.getColumnList(args[4], null, null, null);
                if (columnList.size() == 0) {
                    System.out.println("No columns found for db:" + args[2] + ", table: [" + args[3] + "], with column filter [" + args[4] + "]");
                } else {
                    for (String str : columnList) {
                        System.out.println("Column: " + str);
                    }
                }
            }

        } finally {
            // Always release the Hive connection, even on lookup failure.
            if (hc != null) {
                hc.close();
            }
        }
    }

}
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/src/test/java/org/apache/ranger/services/hive/client/TestRangerServiceHive.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/test/java/org/apache/ranger/services/hive/client/TestRangerServiceHive.java b/hive-agent/src/test/java/org/apache/ranger/services/hive/client/TestRangerServiceHive.java new file mode 100644 index 0000000..7e84c6b --- /dev/null +++ b/hive-agent/src/test/java/org/apache/ranger/services/hive/client/TestRangerServiceHive.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.ranger.services.hive.client; + +import static org.junit.Assert.*; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.client.HadoopException; +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.plugin.store.ServiceStore; +import org.apache.ranger.plugin.store.ServiceStoreFactory; +import org.apache.ranger.services.hive.RangerServiceHive; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + +public class TestRangerServiceHive { + + static final String sdName = "svcDef-Hive"; + static final String serviceName = "HiveDef"; + HashMap<String, Object> responseData = null; + Map<String, String> configs = null; + RangerServiceHive svcHive = null; + RangerServiceDef sd = null; + RangerService svc = null; + ResourceLookupContext lookupContext = null; + + + @Before + public void setup() { + configs = new HashMap<String,String>(); + lookupContext = new ResourceLookupContext(); + + buildHbaseConnectionConfig(); + buildLookupContext(); + + sd = new RangerServiceDef(sdName, "org.apache.ranger.services.hive.RangerServiceHive", "TestHiveService", "test servicedef description", null, null, null, null, null); + svc = new RangerService(sdName, serviceName, "unit test hive resource lookup and validateConfig", configs); + svcHive = new RangerServiceHive(); + svcHive.init(sd, svc); + svcHive.init(); + } + + @Test + public void testValidateConfig() { + + HashMap<String,Object> ret = null; + String errorMessage = null; + + try { + ret = svcHive.validateConfig(); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + 
assertNotNull(ret); + } + } + + + @Test + public void testLookUpResource() { + List<String> ret = new ArrayList<String>(); + String errorMessage = null; + try { + ret = svcHive.lookupResource(lookupContext); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNull(ret); + } + + } + + public void buildHbaseConnectionConfig() { + configs.put("username", "hiveuser"); + configs.put("password", "*******"); + configs.put("jdbc.driverClassName", "org.apache.hive.jdbc.HiveDriver"); + configs.put("jdbc.url ", "jdbc:hive2://localhost:10000/default"); + } + + public void buildLookupContext() { + Map<String, List<String>> resourceMap = new HashMap<String,List<String>>(); + resourceMap.put("database", null); + lookupContext.setUserInput("x"); + lookupContext.setResourceName("database"); + lookupContext.setResources(resourceMap); + } + + @After + public void tearDown() { + sd = null; + svc = null; + } + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/src/test/resource/log4j.properties ---------------------------------------------------------------------- diff --git a/hive-agent/src/test/resource/log4j.properties b/hive-agent/src/test/resource/log4j.properties new file mode 100644 index 0000000..2cbd6ee --- /dev/null +++ b/hive-agent/src/test/resource/log4j.properties @@ -0,0 +1,16 @@ +# Define some default values that can be overridden by system properties +ranger.root.logger=INFO,console +# Define the root logger to the system property "hbase.root.logger". 
+log4j.rootLogger=${ranger.root.logger} + +# Logging Threshold +log4j.threshold=ALL + +# +# console +# Add "console" to rootlogger above if you want to use this +# +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n \ No newline at end of file http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/pom.xml ---------------------------------------------------------------------- diff --git a/knox-agent/pom.xml b/knox-agent/pom.xml index 8e4cdb7..0fdbfa0 100644 --- a/knox-agent/pom.xml +++ b/knox-agent/pom.xml @@ -33,14 +33,73 @@ </parent> <dependencies> <dependency> - <groupId>org.apache.knox</groupId> - <artifactId>gateway-spi</artifactId> - <version>0.5.0</version> + <groupId>org.apache.knox</groupId> + <artifactId>gateway-spi</artifactId> + <version>0.5.0</version> </dependency> <dependency> - <groupId>javax.servlet</groupId> - <artifactId>javax.servlet-api</artifactId> - <version>${javax.servlet.version}</version> + <groupId>javax.servlet</groupId> + <artifactId>javax.servlet-api</artifactId> + <version>${javax.servlet.version}</version> + </dependency> + <dependency> + <groupId>org.apache.calcite</groupId> + <artifactId>calcite-core</artifactId> + <version>${calcite.version}</version> + </dependency> + <dependency> + <groupId>org.apache.calcite</groupId> + <artifactId>calcite-avatica</artifactId> + <version>${calcite.version}</version> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + <artifactId>tez-api</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + <artifactId>tez-runtime-library</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + 
<artifactId>tez-runtime-internals</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + <artifactId>tez-mapreduce</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <version>${hadoop.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <version>${hadoop.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-server</artifactId> + <version>${hbase.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-common</artifactId> + <version>${hive.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-service</artifactId> + <version>${hive.version}</version> </dependency> <dependency> <groupId>security_plugins.ranger-plugins-common</groupId> http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/main/java/org/apache/ranger/services/knox/RangerServiceKnox.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/main/java/org/apache/ranger/services/knox/RangerServiceKnox.java b/knox-agent/src/main/java/org/apache/ranger/services/knox/RangerServiceKnox.java new file mode 100644 index 0000000..4798b81 --- /dev/null +++ b/knox-agent/src/main/java/org/apache/ranger/services/knox/RangerServiceKnox.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.ranger.services.knox; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.RangerBaseService; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.services.knox.client.KnoxResourceMgr; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +public class RangerServiceKnox extends RangerBaseService { + + private static final Log LOG = LogFactory.getLog(RangerServiceKnox.class); + + RangerService service; + RangerServiceDef serviceDef; + Map<String, String> configs; + String serviceName; + + public RangerServiceKnox() { + super(); + } + + @Override + public void init(RangerServiceDef serviceDef, RangerService service) { + super.init(serviceDef, service); + init(); + } + + @Override + public HashMap<String,Object> validateConfig() throws Exception { + HashMap<String, Object> ret = new HashMap<String, Object>(); + if(LOG.isDebugEnabled()) { + LOG.debug("==> RangerServiceKnox.validateConfig Service: (" + service + " )"); + } + if ( configs != null) { + try { + ret = KnoxResourceMgr.validateConfig(service.getName(), service.getConfigs()); + } catch (Exception e) { + LOG.error("<== 
RangerServiceKnox.validateConfig Error:" + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceKnox.validateConfig Response : (" + ret + " )"); + } + return ret; + } + + @Override + public List<String> lookupResource(ResourceLookupContext context) throws Exception { + + List<String> ret = new ArrayList<String>(); + if(LOG.isDebugEnabled()) { + LOG.debug("==> RangerServiceKnox.lookupResource Context: (" + context + ")"); + } + if (context != null) { + try { + ret = KnoxResourceMgr.getKnoxResources(service.getName(),service.getConfigs(),context); + + } catch (Exception e) { + LOG.error( "<== RangerServiceKnox.lookupResource Error : " + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceKnox.lookupResource Response: (" + ret + ")"); + } + return ret; + } + + public void init() { + service = getService(); + serviceDef = getServiceDef(); + serviceName = service.getName(); + configs = service.getConfigs(); + } + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java new file mode 100644 index 0000000..0a23f57 --- /dev/null +++ b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java @@ -0,0 +1,397 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ranger.services.knox.client;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ranger.plugin.client.BaseClient;
import org.apache.ranger.plugin.client.HadoopException;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;

/**
 * Thin REST client for the Knox admin API, used to enumerate Knox topologies
 * and the services they expose (for policy-UI autocomplete and connection
 * tests).
 *
 * <p>Sample curl calls to Knox to discover topologies:</p>
 * <pre>
 * curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
 * curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
 * </pre>
 */
public class KnoxClient {

    private static final String EXPECTED_MIME_TYPE = "application/json";
    private static final Log LOG = LogFactory.getLog(KnoxClient.class);

    // Appended to user-facing error messages; lookup failures are advisory only.
    private static final String LOOKUP_HINT_MSG = " You can still save the repository and start creating "
            + "policies, but you would not be able to use autocomplete for "
            + "resource names. Check xa_portal.log for more info.";

    private String knoxUrl;
    private String userName;
    private String password;

    /**
     * @param knoxUrl  base URL of the Knox admin topologies API
     *                 (e.g. {@code https://host:8443/gateway/admin/api/v1/topologies})
     * @param userName Knox admin user
     * @param password Knox admin password
     */
    public KnoxClient(String knoxUrl, String userName, String password) {
        LOG.debug("Constructed KnoxClient with knoxUrl: " + knoxUrl
                + ", userName: " + userName);
        this.knoxUrl = knoxUrl;
        this.userName = userName;
        this.password = password;
    }

    /**
     * Lists topology names starting with the given prefix.
     *
     * @param topologyNameMatching prefix filter; null/blank means "all"
     * @param knoxTopologyList     names to exclude (already selected in the policy)
     * @return matching topology names; empty list when none found
     * @throws HadoopException on any REST/parse failure, with response data populated
     */
    public List<String> getTopologyList(String topologyNameMatching, List<String> knoxTopologyList) {
        // sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies
        LOG.debug("Getting Knox topology list for topologyNameMatching : " + topologyNameMatching);

        List<String> topologyList = new ArrayList<String>();

        if (topologyNameMatching == null || topologyNameMatching.trim().isEmpty()) {
            topologyNameMatching = "";
        }
        try {
            Client client = null;
            ClientResponse response = null;

            try {
                client = Client.create();
                client.addFilter(new HTTPBasicAuthFilter(userName, password));

                WebResource webResource = client.resource(knoxUrl);

                response = webResource.accept(EXPECTED_MIME_TYPE)
                        .get(ClientResponse.class);
                LOG.debug("Knox topology list response: " + response);

                if (response != null) {
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        LOG.debug("Knox topology list response JSON string: " + jsonString);

                        ObjectMapper objectMapper = new ObjectMapper();

                        JsonNode rootNode = objectMapper.readTree(jsonString);
                        JsonNode topologyNode = rootNode.findValue("topology");
                        if (topologyNode == null) {
                            // No topologies in the response; nothing to list.
                            return topologyList;
                        }
                        Iterator<JsonNode> elements = topologyNode.getElements();
                        while (elements.hasNext()) {
                            JsonNode element = elements.next();
                            String topologyName = element.get("name").getValueAsText();
                            LOG.debug("Found Knox topologyName: " + topologyName);
                            if (knoxTopologyList != null && knoxTopologyList.contains(topologyName)) {
                                continue; // already selected on the policy form
                            }
                            if (topologyName.startsWith(topologyNameMatching)) {
                                topologyList.add(topologyName);
                            }
                        }
                    } else {
                        LOG.error("Got invalid REST response from: " + knoxUrl + ", responsStatus: " + response.getStatus());
                    }
                } else {
                    // Fixed message: this is the topology-list call, not
                    // isFileChanged() (stale copy-paste from the HDFS client).
                    String msgDesc = "Unable to get a valid response from the "
                            + "Knox topology list call for KnoxUrl : [" + knoxUrl
                            + "] - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc,
                            msgDesc + LOOKUP_HINT_MSG, null, null);
                    throw hdpException;
                }
            } finally {
                if (response != null) {
                    response.close();
                }
                if (client != null) {
                    client.destroy();
                }
            }
        } catch (HadoopException he) {
            throw he;
        } catch (Throwable t) {
            String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            LOG.error(msgDesc, t);

            hdpException.generateResponseDataMap(false,
                    BaseClient.getMessage(t), msgDesc + LOOKUP_HINT_MSG, null, null);
            throw hdpException;
        }
        return topologyList;
    }

    /**
     * Lists service roles of a topology whose names start with the given prefix.
     *
     * @param topologyName        topology to inspect
     * @param serviceNameMatching prefix filter; null/blank means "all"
     * @param knoxServiceList     names to exclude (already selected in the policy)
     * @return matching service role names; empty list when none found
     * @throws HadoopException on any REST/parse failure, with response data populated
     */
    public List<String> getServiceList(String topologyName, String serviceNameMatching, List<String> knoxServiceList) {
        // sample URI: .../admin/api/v1/topologies/<topologyName>
        List<String> serviceList = new ArrayList<String>();

        if (serviceNameMatching == null || serviceNameMatching.trim().isEmpty()) {
            serviceNameMatching = "";
        }
        try {
            Client client = null;
            ClientResponse response = null;

            try {
                client = Client.create();
                client.addFilter(new HTTPBasicAuthFilter(userName, password));

                WebResource webResource = client.resource(knoxUrl + "/" + topologyName);

                response = webResource.accept(EXPECTED_MIME_TYPE)
                        .get(ClientResponse.class);
                LOG.debug("Knox service lookup response: " + response);

                if (response != null) {
                    if (response.getStatus() == 200) {
                        String jsonString = response.getEntity(String.class);
                        LOG.debug("Knox service look up response JSON string: " + jsonString);

                        ObjectMapper objectMapper = new ObjectMapper();

                        JsonNode rootNode = objectMapper.readTree(jsonString);
                        JsonNode topologyNode = rootNode.findValue("topology");
                        // Fix: guard missing nodes; previously this NPE'd and
                        // surfaced as a misleading "Exception on REST call".
                        JsonNode servicesNode = (topologyNode == null) ? null : topologyNode.get("services");
                        if (servicesNode == null) {
                            return serviceList;
                        }
                        Iterator<JsonNode> services = servicesNode.getElements();
                        while (services.hasNext()) {
                            JsonNode service = services.next();
                            String serviceName = service.get("role").getValueAsText();
                            LOG.debug("Knox serviceName: " + serviceName);
                            if (knoxServiceList != null && knoxServiceList.contains(serviceName)) {
                                continue; // already selected on the policy form
                            }
                            if (serviceName.startsWith(serviceNameMatching)) {
                                serviceList.add(serviceName);
                            }
                        }
                    } else {
                        LOG.error("Got invalid REST response from: " + knoxUrl + ", responsStatus: " + response.getStatus());
                    }
                } else {
                    // Fixed message: this is the service-list call, not
                    // isFileChanged() (stale copy-paste from the HDFS client).
                    String msgDesc = "Unable to get a valid response from the "
                            + "Knox service list call for KnoxUrl : [" + knoxUrl
                            + "] - got null response.";
                    LOG.error(msgDesc);
                    HadoopException hdpException = new HadoopException(msgDesc);
                    hdpException.generateResponseDataMap(false, msgDesc,
                            msgDesc + LOOKUP_HINT_MSG, null, null);
                    throw hdpException;
                }
            } finally {
                if (response != null) {
                    response.close();
                }
                if (client != null) {
                    client.destroy();
                }
            }
        } catch (HadoopException he) {
            throw he;
        } catch (Throwable t) {
            String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
            HadoopException hdpException = new HadoopException(msgDesc, t);
            LOG.error(msgDesc, t);

            hdpException.generateResponseDataMap(false,
                    BaseClient.getMessage(t), msgDesc + LOOKUP_HINT_MSG, null, null);
            throw hdpException;
        }
        return serviceList;
    }

    /**
     * Manual smoke test: prints every topology and its services.
     * NOTE(review): usage mentions an optional [sslConfigFileName] but the
     * argument is never read; exactly 3 args are required.
     */
    public static void main(String[] args) {
        if (args.length != 3) {
            System.err.println("USAGE: java " + KnoxClient.class.getName()
                    + " knoxUrl userName password [sslConfigFileName]");
            System.exit(1);
        }

        KnoxClient knoxClient = new KnoxClient(args[0], args[1], args[2]);

        List<String> topologyList = knoxClient.getTopologyList("", null);
        if ((topologyList == null) || topologyList.isEmpty()) {
            System.out.println("No knox topologies found");
        } else {
            for (String topology : topologyList) {
                System.out.println("Found Topology: " + topology);
                List<String> serviceList = knoxClient.getServiceList(topology, "", null);
                if ((serviceList == null) || serviceList.isEmpty()) {
                    System.out.println("No services found for knox topology: " + topology);
                } else {
                    for (String service : serviceList) {
                        System.out.println("    Found service for topology: " + service + ", " + topology);
                    }
                }
            }
        }
    }

    /**
     * Verifies connectivity by attempting a topology lookup.
     *
     * @return response-data map with success/failure message for the UI
     */
    public static HashMap<String, Object> testConnection(String serviceName,
            Map<String, String> configs) {

        boolean connectivityStatus = false;
        HashMap<String, Object> responseData = new HashMap<String, Object>();

        KnoxClient knoxClient = getKnoxClient(serviceName, configs);
        List<String> strList = getKnoxResources(knoxClient, "", null, null, null);

        if (strList != null && (strList.size() != 0)) {
            connectivityStatus = true;
        }

        if (connectivityStatus) {
            String successMsg = "TestConnection Successful";
            BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg,
                    null, null, responseData);
        } else {
            String failureMsg = "Unable to retrieve any topologies/services using given parameters.";
            BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + LOOKUP_HINT_MSG,
                    null, null, responseData);
        }

        return responseData;
    }

    /**
     * Builds a KnoxClient from the service's connection config
     * ({@code knox.url}, {@code username}, {@code password}).
     *
     * @throws HadoopException when the config map is null or empty
     */
    public static KnoxClient getKnoxClient(String serviceName,
            Map<String, String> configs) {
        KnoxClient knoxClient = null;
        LOG.debug("Getting knoxClient for ServiceName: " + serviceName
                + ", configMap: " + configs);
        if (configs != null && !configs.isEmpty()) {
            String knoxUrl = configs.get("knox.url");
            String knoxAdminUser = configs.get("username");
            String knoxAdminPassword = configs.get("password");
            knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword);
        } else {
            String msgDesc = "Could not connect as Connection ConfigMap is empty.";
            LOG.error(msgDesc);
            HadoopException hdpException = new HadoopException(msgDesc);
            hdpException.generateResponseDataMap(false, msgDesc, msgDesc + LOOKUP_HINT_MSG, null,
                    null);
            throw hdpException;
        }
        return knoxClient;
    }

    /**
     * Looks up topologies (when {@code serviceName} is null) or services of a
     * topology (otherwise), wrapping any failure in a HadoopException.
     *
     * @throws HadoopException when the client is null or the lookup fails
     */
    public static List<String> getKnoxResources(final KnoxClient knoxClient,
            String topologyName, String serviceName, List<String> knoxTopologyList, List<String> knoxServiceList) {

        List<String> resultList = new ArrayList<String>();

        try {
            if (knoxClient == null) {
                String msgDesc = "Unable to get knox resources: knoxClient is null.";
                LOG.error(msgDesc);
                HadoopException hdpException = new HadoopException(msgDesc);
                hdpException.generateResponseDataMap(false, msgDesc, msgDesc + LOOKUP_HINT_MSG,
                        null, null);
                throw hdpException;
            }

            final Callable<List<String>> callableObj;
            if (serviceName != null) {
                // serviceName given => list services within the topology.
                final String finalServiceNameMatching = serviceName.trim();
                final String finalTopologyName = topologyName;
                final List<String> finalKnoxServiceList = knoxServiceList;
                callableObj = new Callable<List<String>>() {
                    @Override
                    public List<String> call() {
                        return knoxClient.getServiceList(finalTopologyName,
                                finalServiceNameMatching, finalKnoxServiceList);
                    }
                };
            } else {
                // otherwise list topologies matching the (optional) prefix.
                final String finalTopologyNameMatching = (topologyName == null) ? ""
                        : topologyName.trim();
                final List<String> finalKnoxTopologyList = knoxTopologyList;
                callableObj = new Callable<List<String>>() {
                    @Override
                    public List<String> call() {
                        return knoxClient
                                .getTopologyList(finalTopologyNameMatching, finalKnoxTopologyList);
                    }
                };
            }
            resultList = timedTask(callableObj, 5, TimeUnit.SECONDS);

        } catch (HadoopException he) {
            throw he;
        } catch (Exception e) {
            String msgDesc = "Unable to get knox resources.";
            LOG.error(msgDesc, e);
            HadoopException hdpException = new HadoopException(msgDesc);

            hdpException.generateResponseDataMap(false,
                    BaseClient.getMessage(e), msgDesc + LOOKUP_HINT_MSG, null, null);
            throw hdpException;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("<== KnoxClient.getKnoxResources() Result : " + resultList);
        }
        return resultList;
    }

    /**
     * Runs the callable synchronously on the calling thread.
     * NOTE(review): the timeout parameters are currently ignored — no actual
     * time limit is enforced; kept as-is because callers rely on the
     * synchronous semantics.
     */
    public static <T> T timedTask(Callable<T> callableObj, long timeout,
            TimeUnit timeUnit) throws Exception {
        return callableObj.call();
    }

}
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxConnectionMgr.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxConnectionMgr.java b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxConnectionMgr.java new file mode 100644 index 0000000..9c5fb22 --- /dev/null +++ b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxConnectionMgr.java @@ -0,0 +1,119 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.ranger.services.knox.client; + +import java.util.Map; + +import org.apache.log4j.Logger; +import org.apache.ranger.services.knox.client.KnoxClient; +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.store.ServiceStoreFactory; + + + +public class KnoxConnectionMgr { + + private static Logger LOG = Logger.getLogger(KnoxConnectionMgr.class); + + public KnoxClient getKnoxClientbyServiceName(String serviceName) { + KnoxClient knoxClient = null; + RangerService service = null; + + if(LOG.isDebugEnabled()) { + LOG.debug("Getting knoxClient for ServiceName: " + serviceName); + } + + try { + service = ServiceStoreFactory + .instance() + .getServiceStore() + .getServiceByName(serviceName); + + } catch (Exception ex) { + LOG.error("Service could not be found for the Service Name : " + serviceName , ex); + } + + if (service != null) { + knoxClient = getKnoxClientbyService(service); + } + return knoxClient; + } + + public KnoxClient getKnoxClientbyService(RangerService service) { + KnoxClient knoxClient = null; + Map<String,String> configs = null; + + if(LOG.isDebugEnabled()) { + LOG.debug("Getting knoxClient for ServiceName: " + service.toString()); + } + + if (service != null) { + configs = service.getConfigs(); + knoxClient = getKnoxClientByConfig(configs); + } + return knoxClient; + } + + public KnoxClient getKnoxClientByConfig( final Map<String,String> configs) { + KnoxClient knoxClient = null; + if (configs == null) { + LOG.error("Connection Config is empty"); + + } else { + + String knoxUrl = configs.get("knox.url"); + String knoxAdminUser = configs.get("username"); + String knoxAdminPassword = configs.get("password"); + knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword); + } + return knoxClient; + } + + public KnoxClient getKnoxClient(String serviceName, + Map<String, String> configs) { + KnoxClient knoxClient = null; + LOG.debug("Getting knoxClient for datasource: " + serviceName + + 
"configMap: " + configs); + if (configs == null) { + LOG.error("Connection ConfigMap is empty"); + } else { + String knoxUrl = configs.get("knox.url"); + String knoxAdminUser = configs.get("username"); + String knoxAdminPassword = configs.get("password"); + knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword); + } + return knoxClient; + } + + + public KnoxClient getKnoxClient(final String knoxUrl, String knoxAdminUser, String knoxAdminPassword) { + KnoxClient knoxClient = null; + if (knoxUrl == null || knoxUrl.isEmpty()) { + LOG.error("Can not create KnoxClient: knoxUrl is empty"); + } else if (knoxAdminUser == null || knoxAdminUser.isEmpty()) { + LOG.error("Can not create KnoxClient: knoxAdminUser is empty"); + } else if (knoxAdminPassword == null || knoxAdminPassword.isEmpty()) { + LOG.error("Can not create KnoxClient: knoxAdminPassword is empty"); + } else { + knoxClient = new KnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword); + } + return knoxClient; + } +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxResourceMgr.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxResourceMgr.java b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxResourceMgr.java new file mode 100644 index 0000000..8aceb5f --- /dev/null +++ b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxResourceMgr.java @@ -0,0 +1,103 @@ +package org.apache.ranger.services.knox.client; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.apache.ranger.plugin.service.ResourceLookupContext; + + +public class KnoxResourceMgr { + + public static final Logger LOG = Logger.getLogger(KnoxResourceMgr.class); + + private static final String TOPOLOGY = "topology"; + private static final String 
SERVICE = "service"; + + public static HashMap<String, Object> validateConfig(String serviceName, Map<String, String> configs) throws Exception { + HashMap<String, Object> ret = null; + if (LOG.isDebugEnabled()) { + LOG.debug("==> KnoxResourceMgr.testConnection ServiceName: "+ serviceName + "Configs" + configs ) ; + } + try { + ret = KnoxClient.testConnection(serviceName, configs); + } catch (Exception e) { + LOG.error("<== KnoxResourceMgr.testConnection Error: " + e) ; + throw e; + } + + if(LOG.isDebugEnabled()) { + LOG.debug("<== KnoxResourceMgr.HdfsResourceMgr Result : "+ ret ) ; + } + return ret; + } + + public static List<String> getKnoxResources(String serviceName, Map<String, String> configs, ResourceLookupContext context) throws Exception { + + + String userInput = context.getUserInput(); + String resource = context.getResourceName(); + Map<String, List<String>> resourceMap = context.getResources(); + List<String> resultList = null; + List<String> knoxTopologyList = null; + List<String> knoxServiceList = null; + String knoxTopologyName = null; + String knoxServiceName = null; + + if ( userInput != null && resource != null) { + if ( resourceMap != null && !resourceMap.isEmpty() && + ( resourceMap.get(TOPOLOGY) != null || resourceMap.get(SERVICE) != null) ) { + switch (resource.trim().toLowerCase()) { + case TOPOLOGY: + knoxTopologyName = userInput; + knoxTopologyList = resourceMap.get(TOPOLOGY); + break; + case SERVICE: + knoxServiceName = userInput; + knoxServiceList = resourceMap.get(SERVICE); + break; + default: + break; + } + } else { + switch (resource.trim().toLowerCase()) { + case TOPOLOGY: + knoxTopologyName = userInput; + break; + case SERVICE: + knoxServiceName = userInput; + break; + default: + break; + } + } + } + + String knoxUrl = configs.get("knox.url"); + String knoxAdminUser = configs.get("username"); + String knoxAdminPassword = configs.get("password"); + + if (knoxUrl == null || knoxUrl.isEmpty()) { + LOG.error("Unable to get knox 
resources: knoxUrl is empty"); + return resultList; + } else if (knoxAdminUser == null || knoxAdminUser.isEmpty()) { + LOG.error("Unable to get knox resources: knoxAdminUser is empty"); + return resultList; + } else if (knoxAdminPassword == null || knoxAdminPassword.isEmpty()) { + LOG.error("Unable to get knox resources: knoxAdminPassword is empty"); + return resultList; + } + + if(LOG.isDebugEnabled()) { + LOG.debug("<== KnoxResourceMgr.getKnoxResources() knoxUrl: "+ knoxUrl + " knoxAdminUser: " + knoxAdminUser + " topologyName: " + knoxTopologyName + " KnoxServiceName: " + knoxServiceName) ; + } + + final KnoxClient knoxClient = new KnoxConnectionMgr().getKnoxClient(knoxUrl, knoxAdminUser, knoxAdminPassword); + resultList = KnoxClient.getKnoxResources(knoxClient, knoxTopologyName, knoxServiceName,knoxTopologyList,knoxServiceList); + + return resultList; + } + + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java new file mode 100644 index 0000000..dd13cbb --- /dev/null +++ b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/KnoxClientTest.java @@ -0,0 +1,41 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.ranger.services.knox.client; + +public class KnoxClientTest { + + + /* + Sample curl calls to knox REST API to discover topologies + curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies + curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin + */ + + public static void main(String[] args) { + System.out.println(System.getProperty("java.class.path")); + System.setProperty("javax.net.ssl.trustStore", "/tmp/cacertswithknox)"); + String[] testArgs = { + "https://localhost:8443/gateway/admin/api/v1/topologies", + "admin", + "admin-password" + }; + KnoxClient.main(testArgs); + } + + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java ---------------------------------------------------------------------- diff --git a/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java new file mode 100644 index 0000000..6582d67 --- /dev/null +++ b/knox-agent/src/test/java/org/apache/ranger/services/knox/client/TestRangerServiceKnox.java @@ -0,0 +1,132 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.services.knox.client; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.client.HadoopException; +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.plugin.store.ServiceStore; +import org.apache.ranger.plugin.store.ServiceStoreFactory; +import org.apache.ranger.services.knox.RangerServiceKnox; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + + +public class TestRangerServiceKnox { + + static final String sdName = "svcDef-Knox"; + static final String serviceName = "KnoxDef"; + HashMap<String, Object> responseData = null; + Map<String, String> configs = null; + RangerServiceKnox svcKnox = null; + RangerServiceDef sd = null; + RangerService svc = null; + ResourceLookupContext lookupContext = null; + + + @Before + public void setup() { + configs = new HashMap<String,String>(); + lookupContext = new ResourceLookupContext(); + + buildHbaseConnectionConfig(); + buildLookupContext(); + + sd = new RangerServiceDef(sdName, " org.apache.ranger.services.knox.RangerServiceKnox", "TestKnoxService", "test Knox 
servicedef description", null, null, null, null, null); + svc = new RangerService(sdName, serviceName, "unit test Knox resource lookup and validateConfig", configs); + svcKnox = new RangerServiceKnox(); + svcKnox.init(sd, svc); + svcKnox.init(); + } + + @Test + public void testValidateConfig() { + + HashMap<String,Object> ret = null; + String errorMessage = null; + + try { + ret = svcKnox.validateConfig(); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNotNull(ret); + } + } + + + @Test + public void testLookUpResource() { + List<String> ret = new ArrayList<String>(); + String errorMessage = null; + try { + ret = svcKnox.lookupResource(lookupContext); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNotNull(ret); + } + } + + public void buildHbaseConnectionConfig() { + configs.put("username", "admin"); + configs.put("password", "admin-password"); + configs.put("knox.url", "https://localhost:8443/gateway/admin/api/v1/topologies"); + } + + public void buildLookupContext() { + Map<String, List<String>> resourceMap = new HashMap<String,List<String>>(); + resourceMap.put("topology", null); + lookupContext.setUserInput("a"); + lookupContext.setResourceName("topology"); + lookupContext.setResources(resourceMap); + } + + @After + public void tearDown() { + sd = null; + svc = null; + } + +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java ---------------------------------------------------------------------- diff --git 
a/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java b/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java index 922d174..04a1ffe 100644 --- a/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java +++ b/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java @@ -456,7 +456,7 @@ public class ServiceREST { } if(LOG.isDebugEnabled()) { - LOG.debug("<== ServiceREST.validateConfig(" + serviceName + "): " + ret); + LOG.debug("<== ServiceREST.lookupResource(" + serviceName + "): " + ret); } return ret;
