RANGER-225: LookupResource and ValidateConfig implementation for all components in the new pluggable model - HDFS, HBase, Hive, Knox
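Every component in this change follows the same plugin shape: a RangerBaseService subclass whose validateConfig() runs a connection test and whose lookupResource() serves autocomplete, both delegating to a per-component resource manager (HBaseResourceMgr, HiveResourceMgr, KnoxResourceMgr). Below is a minimal sketch of that pattern, assuming RangerBaseService requires only these two overrides; ExampleResourceMgr is a hypothetical stand-in, and the other names follow the RangerServiceHBase/RangerServiceHive sources in this diff.

    package org.apache.ranger.services.example;

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.ranger.plugin.service.RangerBaseService;
    import org.apache.ranger.plugin.service.ResourceLookupContext;

    public class RangerServiceExample extends RangerBaseService {

        @Override
        public HashMap<String, Object> validateConfig() throws Exception {
            // Delegate the connection test to the component's resource manager,
            // as RangerServiceHBase/RangerServiceHive do below.
            HashMap<String, Object> ret = new HashMap<String, Object>();
            Map<String, String> configs = getService().getConfigs();
            if (configs != null) {
                ret = ExampleResourceMgr.testConnection(getService().getName(), configs);
            }
            return ret;
        }

        @Override
        public List<String> lookupResource(ResourceLookupContext context) throws Exception {
            // Autocomplete lookups route through the same resource manager.
            List<String> ret = new ArrayList<String>();
            if (context != null) {
                ret = ExampleResourceMgr.getResources(getService().getName(), getService().getConfigs(), context);
            }
            return ret;
        }

        // Hypothetical stand-in for HBaseResourceMgr/HiveResourceMgr/KnoxResourceMgr;
        // the real managers open a timed client connection and match resource names.
        static class ExampleResourceMgr {
            static HashMap<String, Object> testConnection(String serviceName, Map<String, String> configs) {
                return new HashMap<String, Object>();
            }
            static List<String> getResources(String serviceName, Map<String, String> configs, ResourceLookupContext context) {
                return new ArrayList<String>();
            }
        }
    }

The component classes in the diff wrap this skeleton with debug logging and translate failures into HadoopException responses.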
Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/5a713177 Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/5a713177 Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/5a713177 Branch: refs/heads/stack Commit: 5a713177963989e6028e1918fd1c97e40c03023f Parents: a6f8050 Author: rmani <[email protected]> Authored: Tue Feb 10 22:18:11 2015 -0800 Committer: rmani <[email protected]> Committed: Tue Feb 10 22:18:11 2015 -0800 ---------------------------------------------------------------------- .../services/hbase/RangerServiceHBase.java | 105 ++++ .../services/hbase/client/HBaseClient.java | 444 +++++++++++++++ .../hbase/client/HBaseConnectionMgr.java | 139 +++++ .../services/hbase/client/HBaseResourceMgr.java | 167 ++++++ .../services/hbase/TestRangerServiceHBase.java | 145 +++++ hdfs-agent/pom.xml | 84 ++- .../ranger/services/hdfs/client/HdfsClient.java | 4 +- .../services/hdfs/client/HdfsConnectionMgr.java | 10 +- .../services/hdfs/client/HdfsResourceMgr.java | 30 +- .../services/hdfs/TestRangerServiceHdfs.java | 34 +- hive-agent/pom.xml | 6 +- .../ranger/services/hive/RangerServiceHive.java | 102 ++++ .../ranger/services/hive/client/HiveClient.java | 566 +++++++++++++++++++ .../services/hive/client/HiveConnectionMgr.java | 100 ++++ .../services/hive/client/HiveResourceMgr.java | 190 +++++++ .../services/hive/client/HiveClientTester.java | 99 ++++ .../hive/client/TestRangerServiceHive.java | 132 +++++ hive-agent/src/test/resource/log4j.properties | 16 + knox-agent/pom.xml | 71 ++- .../ranger/services/knox/RangerServiceKnox.java | 102 ++++ .../ranger/services/knox/client/KnoxClient.java | 397 +++++++++++++ .../services/knox/client/KnoxConnectionMgr.java | 119 ++++ .../services/knox/client/KnoxResourceMgr.java | 103 ++++ .../services/knox/client/KnoxClientTest.java | 41 ++ .../knox/client/TestRangerServiceKnox.java | 132 +++++ .../org/apache/ranger/rest/ServiceREST.java | 2 +- 26 files changed, 3263 insertions(+), 77 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java ---------------------------------------------------------------------- diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java new file mode 100644 index 0000000..2c6e311 --- /dev/null +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java @@ -0,0 +1,105 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.ranger.services.hbase; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.RangerBaseService; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.services.hbase.client.HBaseResourceMgr; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +public class RangerServiceHBase extends RangerBaseService { + + private static final Log LOG = LogFactory.getLog(RangerServiceHBase.class); + + RangerService service; + RangerServiceDef serviceDef; + Map<String, String> configs; + String serviceName; + + public RangerServiceHBase() { + super(); + } + + @Override + public void init(RangerServiceDef serviceDef, RangerService service) { + super.init(serviceDef, service); + init(); + } + + @Override + public HashMap<String,Object> validateConfig() throws Exception { + HashMap<String, Object> ret = new HashMap<String, Object>(); + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHBase.validateConfig() Service: (" + service + " )"); + } + if ( configs != null) { + try { + ret = HBaseResourceMgr.testConnection(service.getName(), service.getConfigs()); + } catch (Exception e) { + LOG.error("<== RangerServiceHBase.validateConfig() Error:" + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHBase.validateConfig() Response : (" + ret + " )"); + } + return ret; + } + + @Override + public List<String> lookupResource(ResourceLookupContext context) throws Exception { + + List<String> ret = new ArrayList<String>(); + String svc = service.getName(); + + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHBase.lookupResource() Service : " + svc + " Context: (" + context + ")"); + } + + if (context != null) { + try { + ret = HBaseResourceMgr.getHBaseResource(service.getName(),service.getConfigs(),context); + } catch (Exception e) { + LOG.error( "<==RangerServiceHBase.lookupResource() Error : " + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHBase.lookupResource() Response: (" + ret + ")"); + } + return ret; + } + + public void init() { + service = getService(); + serviceDef = getServiceDef(); + serviceName = service.getName(); + configs = service.getConfigs(); + } + +} + http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java ---------------------------------------------------------------------- diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java new file mode 100644 index 0000000..320e084 --- /dev/null +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java @@ -0,0 +1,444 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + package org.apache.ranger.services.hbase.client; + +import java.io.IOException; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import javax.security.auth.Subject; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HTableDescriptor; +import org.apache.hadoop.hbase.MasterNotRunningException; +import org.apache.hadoop.hbase.ZooKeeperConnectionException; +import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.ranger.plugin.client.BaseClient; +import org.apache.ranger.plugin.client.HadoopException; + +import com.google.protobuf.ServiceException; + +public class HBaseClient extends BaseClient { + + private static final Log LOG = LogFactory.getLog(HBaseClient.class) ; + + private static Subject subj = null; + + public HBaseClient(String serivceName) { + super(serivceName) ; + } + + public HBaseClient(String serivceName,Map<String,String> connectionProp) { + super(serivceName, addDefaultHBaseProp(connectionProp)) ; + } + + //TODO: temporary solution - to be added to the UI for HBase + private static Map<String,String> addDefaultHBaseProp(Map<String,String> connectionProp) { + if (connectionProp != null) { + String param = "zookeeper.znode.parent" ; + String unsecuredPath = "/hbase-unsecure" ; + String authParam = "hadoop.security.authorization" ; + + String ret = connectionProp.get(param) ; + LOG.info("HBase connection has [" + param + "] with value [" + ret + "]"); + if (ret == null) { + ret = connectionProp.get(authParam) ; + LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]"); + if (ret != null && ret.trim().equalsIgnoreCase("false")) { + LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]"); + connectionProp.put(param, unsecuredPath) ; + } + } + } + return connectionProp; + } + + public static HashMap<String, Object> testConnection(String dataSource, + Map<String, String> configs) { + + HashMap<String, Object> responseData = new HashMap<String, Object>(); + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
Check xa_portal.log for more info."; + boolean connectivityStatus = false; + + HBaseClient connectionObj = new HBaseClient(dataSource, + configs); + if (connectionObj != null) { + connectivityStatus = connectionObj.getHBaseStatus(); + } + + if (connectivityStatus) { + String successMsg = "TestConnection Successful"; + generateResponseDataMap(connectivityStatus, successMsg, successMsg, + null, null, responseData); + } else { + String failureMsg = "Unable to retrieve any databases using given parameters."; + generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + + errMsg, null, null, responseData); + } + return responseData; + } + + public boolean getHBaseStatus() { + boolean hbaseStatus = false; + subj = getLoginSubject(); + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + if (subj != null) { + ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ; + try { + Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader()); + + hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() { + @Override + public Boolean run() { + Boolean hbaseStatus1 = false; + try { + LOG.info("getHBaseStatus: creating default Hbase configuration"); + Configuration conf = HBaseConfiguration.create() ; + LOG.info("getHBaseStatus: setting config values from client"); + setClientConfigValues(conf); + LOG.info("getHBaseStatus: checking HbaseAvailability with the new config"); + HBaseAdmin.checkHBaseAvailable(conf); + LOG.info("getHBaseStatus: no exception: HbaseAvailability true"); + hbaseStatus1 = true; + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` " + + "using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + hdpException.generateResponseDataMap(false, getMessage(zce), + msgDesc + errMsg, null, null); + + LOG.error(msgDesc + zce) ; + throw hdpException; + + } catch (MasterNotRunningException mnre) { + String msgDesc = "getHBaseStatus: Looks like `Master` is not running, " + + "so couldn't check that running HBase is available or not, " + + "Please try again later."; + HadoopException hdpException = new HadoopException( + msgDesc, mnre); + hdpException.generateResponseDataMap(false, + getMessage(mnre), msgDesc + errMsg, + null, null); + LOG.error(msgDesc + mnre) ; + throw hdpException; + + } catch (ServiceException se) { + String msgDesc = "getHBaseStatus: Unable to check availability of " + + "Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; + HadoopException hdpException = new HadoopException(msgDesc, se); + hdpException.generateResponseDataMap(false, getMessage(se), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + se); + throw hdpException; + + } catch(IOException io) { + String msgDesc = "getHBaseStatus: Unable to check availability of" + + " Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; + HadoopException hdpException = new HadoopException(msgDesc, io); + hdpException.generateResponseDataMap(false, getMessage(io), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + io) ; + throw hdpException; + + } catch (Throwable e) { + String msgDesc = "getHBaseStatus: Unable to check availability of" + + " Hbase environment [" + getConfigHolder().getDatasourceName() + "]."; + LOG.error(msgDesc + e); + hbaseStatus1 = false; + HadoopException hdpException 
= new HadoopException(msgDesc, e); + hdpException.generateResponseDataMap(false, getMessage(e), + msgDesc + errMsg, null, null); + throw hdpException; + } + return hbaseStatus1; + } + }) ; + } catch (SecurityException se) { + String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, " + + "current thread might not be able set the context ClassLoader."; + HadoopException hdpException = new HadoopException(msgDesc, se); + hdpException.generateResponseDataMap(false, getMessage(se), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + se) ; + throw hdpException; + } finally { + Thread.currentThread().setContextClassLoader(prevCl); + } + } else { + LOG.error("getHBaseStatus: secure login not done, subject is null"); + } + + return hbaseStatus; + } + + private void setClientConfigValues(Configuration conf) { + if (this.connectionProperties == null) return; + Iterator<Entry<String, String>> i = this.connectionProperties.entrySet().iterator(); + while (i.hasNext()) { + Entry<String, String> e = i.next(); + String v = conf.get(e.getKey()); + if (v != null && !v.equalsIgnoreCase(e.getValue())) { + conf.set(e.getKey(), e.getValue()); + } + } + } + + public List<String> getTableList(final String tableNameMatching, final List<String> existingTableList ) { + List<String> ret = null ; + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + + subj = getLoginSubject(); + + if (subj != null) { + ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ; + try { + Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader()); + + ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() { + + @Override + public List<String> run() { + + List<String> tableList = new ArrayList<String>() ; + HBaseAdmin admin = null ; + try { + LOG.info("getTableList: creating default Hbase configuration"); + Configuration conf = HBaseConfiguration.create() ; + LOG.info("getTableList: setting config values from client"); + setClientConfigValues(conf); + LOG.info("getTableList: checking HbaseAvailability with the new config"); + HBaseAdmin.checkHBaseAvailable(conf); + LOG.info("getTableList: no exception: HbaseAvailability true"); + admin = new HBaseAdmin(conf) ; + for (HTableDescriptor htd : admin.listTables(tableNameMatching)) { + String tableName = htd.getNameAsString(); + if ( existingTableList != null && existingTableList.contains(tableName)) { + continue; + } else { + tableList.add(htd.getNameAsString()); + } + } + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getTableList: Unable to connect to `ZooKeeper` " + + "using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + hdpException.generateResponseDataMap(false, getMessage(zce), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + zce) ; + throw hdpException; + + } catch (MasterNotRunningException mnre) { + String msgDesc = "getTableList: Looks like `Master` is not running, " + + "so couldn't check that running HBase is available or not, " + + "Please try again later."; + HadoopException hdpException = new HadoopException( + msgDesc, mnre); + hdpException.generateResponseDataMap(false, + getMessage(mnre), msgDesc + errMsg, + null, null); + LOG.error(msgDesc + mnre) ; + throw hdpException; + + } catch(IOException io) { + String msgDesc = "getTableList: Unable to get HBase table List for [repository:" + + 
getConfigHolder().getDatasourceName() + ",table-match:" + + tableNameMatching + "]."; + HadoopException hdpException = new HadoopException(msgDesc, io); + hdpException.generateResponseDataMap(false, getMessage(io), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + io) ; + throw hdpException; + } catch (Throwable e) { + String msgDesc = "getTableList : Unable to get HBase table List for [repository:" + + getConfigHolder().getDatasourceName() + ",table-match:" + + tableNameMatching + "]."; + LOG.error(msgDesc + e); + HadoopException hdpException = new HadoopException(msgDesc, e); + hdpException.generateResponseDataMap(false, getMessage(e), + msgDesc + errMsg, null, null); + throw hdpException; + } + finally { + if (admin != null) { + try { + admin.close() ; + } catch (IOException e) { + LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e); + } + } + } + return tableList ; + } + + }) ; + } + finally { + Thread.currentThread().setContextClassLoader(prevCl); + } + } + return ret ; + } + + + public List<String> getColumnFamilyList(final String columnFamilyMatching, final List<String> tableList,final List<String> existingColumnFamilies) { + List<String> ret = null ; + final String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. Check xa_portal.log for more info."; + + subj = getLoginSubject(); + if (subj != null) { + ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ; + try { + Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader()); + + ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() { + String tblName = null; + @Override + public List<String> run() { + List<String> colfList = new ArrayList<String>() ; + HBaseAdmin admin = null ; + try { + LOG.info("getColumnFamilyList: creating default Hbase configuration"); + Configuration conf = HBaseConfiguration.create() ; + LOG.info("getColumnFamilyList: setting config values from client"); + setClientConfigValues(conf); + LOG.info("getColumnFamilyList: checking HbaseAvailability with the new config"); + HBaseAdmin.checkHBaseAvailable(conf); + LOG.info("getColumnFamilyList: no exception: HbaseAvailability true"); + admin = new HBaseAdmin(conf) ; + if (tableList != null) { + for (String tableName: tableList) { + tblName = tableName; + HTableDescriptor htd = admin.getTableDescriptor(tblName.getBytes()) ; + if (htd != null) { + for (HColumnDescriptor hcd : htd.getColumnFamilies()) { + String colf = hcd.getNameAsString() ; + if (colf.matches(columnFamilyMatching)) { + if (existingColumnFamilies != null && existingColumnFamilies.contains(colf)) { + continue; + } else { + colfList.add(colf); + } + + } + } + } + } + } + } catch (ZooKeeperConnectionException zce) { + String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` " + + "using given config parameters."; + HadoopException hdpException = new HadoopException(msgDesc, zce); + hdpException.generateResponseDataMap(false, getMessage(zce), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + zce) ; + throw hdpException; + + } catch (MasterNotRunningException mnre) { + String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, " + + "so couldn't check that running HBase is available or not, " + + "Please try again later."; + HadoopException hdpException = new HadoopException( + msgDesc, mnre); + hdpException.generateResponseDataMap(false, + getMessage(mnre), msgDesc + 
errMsg, + null, null); + LOG.error(msgDesc + mnre) ; + throw hdpException; + + } catch(IOException io) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName + + ", table-match:" + columnFamilyMatching + "], " + + "current thread might not be able set the context ClassLoader."; + HadoopException hdpException = new HadoopException(msgDesc, io); + hdpException.generateResponseDataMap(false, getMessage(io), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + io) ; + throw hdpException; + } catch (SecurityException se) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName + + ", table-match:" + columnFamilyMatching + "], " + + "current thread might not be able set the context ClassLoader."; + HadoopException hdpException = new HadoopException(msgDesc, se); + hdpException.generateResponseDataMap(false, getMessage(se), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + se) ; + throw hdpException; + + } catch (Throwable e) { + String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for " + + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tblName + + ", table-match:" + columnFamilyMatching + "], " + + "current thread might not be able set the context ClassLoader."; + LOG.error(msgDesc); + HadoopException hdpException = new HadoopException(msgDesc, e); + hdpException.generateResponseDataMap(false, getMessage(e), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + e) ; + throw hdpException; + } + finally { + if (admin != null) { + try { + admin.close() ; + } catch (IOException e) { + LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e); + } + } + } + return colfList ; + } + + }) ; + } catch (SecurityException se) { + String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, " + + "current thread might not be able set the context ClassLoader."; + HadoopException hdpException = new HadoopException(msgDesc, se); + hdpException.generateResponseDataMap(false, getMessage(se), + msgDesc + errMsg, null, null); + LOG.error(msgDesc + se) ; + throw hdpException; + } finally { + Thread.currentThread().setContextClassLoader(prevCl); + } + } + return ret ; + } + +} + http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java ---------------------------------------------------------------------- diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java new file mode 100644 index 0000000..3797f45 --- /dev/null +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseConnectionMgr.java @@ -0,0 +1,139 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.services.hbase.client; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; + +import org.apache.log4j.Logger; +import org.apache.ranger.services.hbase.client.HBaseClient; +import org.apache.ranger.plugin.store.ServiceStoreFactory; +import org.apache.ranger.plugin.util.TimedEventUtil; + + +public class HBaseConnectionMgr { + + private static Logger LOG = Logger.getLogger(HBaseConnectionMgr.class); + + protected HashMap<String, HBaseClient> hbaseConnectionCache; + + protected HashMap<String, Boolean> repoConnectStatusMap; + + public HBaseConnectionMgr() { + hbaseConnectionCache = new HashMap<String, HBaseClient>(); + repoConnectStatusMap = new HashMap<String, Boolean>(); + } + + public HBaseClient getHBaseConnection(final String serviceName, final Map<String,String> configs) { + + HBaseClient client = null; + String serviceType = null; + try { + serviceType = ServiceStoreFactory + .instance() + .getServiceStore() + .getServiceByName(serviceName) + .getType(); + } catch (Exception ex) { + LOG.error("Service could not be found for the Service Name : " + serviceName , ex); + } + if (serviceType != null) { + // get it from the cache + synchronized (hbaseConnectionCache) { + client = hbaseConnectionCache.get(serviceType); + if (client == null) { + if ( configs == null ) { + final Callable<HBaseClient> connectHBase = new Callable<HBaseClient>() { + @Override + public HBaseClient call() throws Exception { + HBaseClient hBaseClient=null; + if(serviceName!=null){ + try{ + hBaseClient=new HBaseClient(serviceName); + }catch(Exception ex){ + LOG.error("Error connecting HBase repository : ", ex); + } + } + return hBaseClient; + } + }; + + try { + if(connectHBase!=null){ + client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); + } + } catch(Exception e){ + LOG.error("Error connecting HBase repository : " + serviceName); + } + } else { + + final Callable<HBaseClient> connectHBase = new Callable<HBaseClient>() { + @Override + public HBaseClient call() throws Exception { + HBaseClient hBaseClient=null; + if(serviceName!=null && configs !=null){ + try{ + hBaseClient=new HBaseClient(serviceName,configs); + }catch(Exception ex){ + LOG.error("Error connecting HBase repository : ", ex); + } + } + return hBaseClient; + } + }; + + try { + if(connectHBase!=null){ + client = TimedEventUtil.timedTask(connectHBase, 5, TimeUnit.SECONDS); + } + } catch(Exception e){ + LOG.error("Error connecting HBase repository : "+ + serviceName +" using config : "+ configs); + } + } + + if(client!=null){ + hbaseConnectionCache.put(serviceType, client); + } + + } else { + + List<String> testConnect = client.getTableList(".\\*",null); + + if(testConnect == null){ + hbaseConnectionCache.remove(serviceType); + client = getHBaseConnection(serviceName,configs); + } + } + repoConnectStatusMap.put(serviceType, true); + } + } else { + LOG.error("Service Name not found with name " + serviceName, + new Throwable()); + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== 
HBaseConnectionMgr.getHBaseConnection() HbaseClient : "+ client ) ; + } + return client; + } +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java ---------------------------------------------------------------------- diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java new file mode 100644 index 0000000..f107bf0 --- /dev/null +++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.services.hbase.client; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.TimeUnit; + +import org.apache.log4j.Logger; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.plugin.util.TimedEventUtil; + + +public class HBaseResourceMgr { + + public static final Logger LOG = Logger.getLogger(HBaseResourceMgr.class); + + private static final String TABLE = "table"; + private static final String COLUMNFAMILY = "column-family"; + + public static HashMap<String, Object> testConnection(String serviceName, Map<String, String> configs) throws Exception { + HashMap<String, Object> ret = null; + if(LOG.isDebugEnabled()) { + LOG.debug("<== HBaseResourceMgr.testConnection() ServiceName: "+ serviceName + "Configs" + configs ) ; + } + + try { + ret = HBaseClient.testConnection(serviceName, configs); + } catch (Exception e) { + LOG.error("<== HBaseResourceMgr.testConnection() Error: " + e) ; + throw e; + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== HBaseResourceMgr.testConnection() Result: "+ ret ) ; + } + return ret; + } + + public static List<String> getHBaseResource(String serviceName, Map<String, String> configs,ResourceLookupContext context) throws Exception{ + + String userInput = context.getUserInput(); + String resource = context.getResourceName(); + Map<String, List<String>> resourceMap = context.getResources(); + List<String> resultList = null; + String tableName = null; + String columnFamilies = null; + List<String> tableList = null; + List<String> columnFamilyList = null; + + if(LOG.isDebugEnabled()) { + LOG.debug("<== HBaseResourceMgr.getHBaseResource UserInput: \""+ userInput + "\" resource : " + resource + " resourceMap: " + resourceMap) ; + } + + if ( userInput != null && resource != null) { + if ( resourceMap != null && !resourceMap.isEmpty() && ( resourceMap.get(TABLE) != null || resourceMap.get(COLUMNFAMILY) != null) ) { + switch 
(resource.trim().toLowerCase()) { + case TABLE: + tableName = userInput; + tableList = resourceMap.get(TABLE); + break; + case COLUMNFAMILY: + columnFamilies = userInput; + columnFamilyList = resourceMap.get(COLUMNFAMILY); + break; + default: + break; + } + } + else { + switch (resource.trim().toLowerCase()) { + case TABLE: + tableName = userInput; + break; + case COLUMNFAMILY: + columnFamilies = userInput; + break; + default: + break; + } + } + } + + + if (serviceName != null && userInput != null && resource != null) { + final List<String> finaltableList = tableList; + final List<String> finalcolumnFamilyList = columnFamilyList; + + try { + if(LOG.isDebugEnabled()) { + LOG.debug("<== HBaseResourceMgr.getHBaseResource UserInput: \""+ userInput + "\" configs: " + configs + " context: " + context) ; + } + final HBaseClient hBaseClient = new HBaseConnectionMgr().getHBaseConnection(serviceName,configs); + final Callable<List<String>> callableObj; + + if (hBaseClient != null && tableName != null + && !tableName.isEmpty()) { + final String finalColFamilies; + final String finalTableName; + if (columnFamilies != null && !columnFamilies.isEmpty()) { + if (!columnFamilies.endsWith("*")) { + columnFamilies += "*"; + } + + columnFamilies = columnFamilies.replaceAll("\\*", + ".\\*"); + finalColFamilies = columnFamilies; + finalTableName = tableName; + + callableObj = new Callable<List<String>>() { + @Override + public List<String> call() { + return hBaseClient.getColumnFamilyList(finalColFamilies,finaltableList,finalcolumnFamilyList); + } + }; + + } else { + if (!tableName.endsWith("*")) { + tableName += "*"; + } + + tableName = tableName.replaceAll("\\*", ".\\*"); + finalTableName = tableName; + + callableObj = new Callable<List<String>>() { + @Override + public List<String> call() { + return hBaseClient.getTableList(finalTableName,finaltableList); + } + }; + + } + resultList = TimedEventUtil.timedTask(callableObj, 5, + TimeUnit.SECONDS); + } + + } catch (Exception e) { + LOG.error("Unable to get hbase resources.", e); + throw e; + } + } + + if(LOG.isDebugEnabled()) { + LOG.debug("<== HBaseResourceMgr.getHBaseResource() Result :" + resultList) ; + } + + return resultList; + } +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java ---------------------------------------------------------------------- diff --git a/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java b/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java new file mode 100644 index 0000000..3c594ab --- /dev/null +++ b/hbase-agent/src/test/java/org/apache/ranger/services/hbase/TestRangerServiceHBase.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.ranger.services.hbase; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.client.HadoopException; +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.plugin.store.ServiceStore; +import org.apache.ranger.plugin.store.ServiceStoreFactory; +import org.apache.ranger.services.hbase.RangerServiceHBase; +import org.apache.ranger.services.hbase.client.HBaseClient; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; +import org.mockito.Mockito; + + +public class TestRangerServiceHBase { + static ServiceStore svcStore = null; + static final String sdName = "svcDef-HBase"; + static final String serviceName = "HBaseDef"; + HashMap<String, Object> responseData = null; + Map<String, String> configs = null; + RangerServiceHBase svcHBase = null; + RangerServiceDef sd = null; + RangerService svc = null; + ResourceLookupContext lookupContext = null; + + + @Before + public void setup() { + configs = new HashMap<String,String>(); + lookupContext = new ResourceLookupContext(); + + buildHbaseConnectionConfig(); + buildLookupContext(); + + svcStore = ServiceStoreFactory.instance().getServiceStore(); + + sd = new RangerServiceDef(sdName, "org.apache.ranger.services.hbase.RangerServiceHBase", "TestService", "test servicedef description", null, null, null, null, null); + svc = new RangerService(sdName, serviceName, "unit test hbase resource lookup and validateConfig", configs); + svcHBase = new RangerServiceHBase(); + svcHBase.init(sd, svc); + svcHBase.init(); + } + + @Test + public void testValidateConfig() { + + HashMap<String,Object> ret = null; + String errorMessage = null; + + try { + ret = svcHBase.validateConfig(); + }catch (Exception e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNotNull(ret); + } + } + + + @Test + public void testLookUpResource() { + List<String> ret = new ArrayList<String>(); + List<String> mockresult = new ArrayList<String>(){{add("iemployee");add("idepartment");}}; + String errorMessage = null; + HBaseClient hbaseClient = new HBaseClient("hbasedev", configs); + try { + Mockito.when(hbaseClient.getTableList("iem", null)).thenReturn(mockresult); + ret = svcHBase.lookupResource(lookupContext); + }catch (Throwable e) { + errorMessage = e.getMessage(); + if ( e instanceof HadoopException) { + errorMessage = "HadoopException"; + } + } + + if ( errorMessage != null) { + assertTrue(errorMessage.contains("HadoopException")); + } else { + assertNotNull(ret); + } + } + + public void buildHbaseConnectionConfig() { + configs.put("username", "hbaseuser"); + configs.put("password", "*******"); + configs.put("hadoop.security.authentication", "simple"); + configs.put("hbase.master.kerberos.principal", "hbase/[email protected]"); + configs.put("hbase.security.authentication", "simple"); + configs.put("hbase.zookeeper.property.clientPort", "2181"); + configs.put("hbase.zookeeper.quorum", "localhost"); + 
configs.put("zookeeper.znode.parent","/hbase-unsecure"); + configs.put("isencrypted", "true"); + } + + public void buildLookupContext() { + Map<String, List<String>> resourceMap = new HashMap<String,List<String>>(); + resourceMap.put(null, null); + lookupContext.setUserInput("iem"); + lookupContext.setResourceName("table"); + lookupContext.setResources(resourceMap); + } + + + @After + public void tearDown() { + sd = null; + svc = null; + } + +} http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hdfs-agent/pom.xml ---------------------------------------------------------------------- diff --git a/hdfs-agent/pom.xml b/hdfs-agent/pom.xml index db0fbee..7947a73 100644 --- a/hdfs-agent/pom.xml +++ b/hdfs-agent/pom.xml @@ -43,29 +43,83 @@ <version>${commons.logging.version}</version> </dependency> <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-common</artifactId> - <version>${hadoop.version}</version> + <groupId>javax.servlet</groupId> + <artifactId>javax.servlet-api</artifactId> + <version>${javax.servlet.version}</version> </dependency> <dependency> - <groupId>org.apache.hadoop</groupId> - <artifactId>hadoop-hdfs</artifactId> - <version>${hadoop.version}</version> + <groupId>org.apache.calcite</groupId> + <artifactId>calcite-core</artifactId> + <version>${calcite.version}</version> </dependency> <dependency> - <groupId>javassist</groupId> - <artifactId>javassist</artifactId> - <version>${javassist.version}</version> + <groupId>org.apache.calcite</groupId> + <artifactId>calcite-avatica</artifactId> + <version>${calcite.version}</version> </dependency> <dependency> - <groupId>security_plugins.ranger-plugins-audit</groupId> - <artifactId>ranger-plugins-audit</artifactId> - <version>${project.version}</version> + <groupId>org.apache.tez</groupId> + <artifactId>tez-api</artifactId> + <version>${tez.version}</version> + <optional>true</optional> </dependency> <dependency> - <groupId>security_plugins.ranger-plugins-common</groupId> - <artifactId>ranger-plugins-common</artifactId> - <version>${project.version}</version> + <groupId>org.apache.tez</groupId> + <artifactId>tez-runtime-library</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + <artifactId>tez-runtime-internals</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.tez</groupId> + <artifactId>tez-mapreduce</artifactId> + <version>${tez.version}</version> + <optional>true</optional> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <version>${hadoop.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <version>${hadoop.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hbase</groupId> + <artifactId>hbase-server</artifactId> + <version>${hbase.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-common</artifactId> + <version>${hive.version}</version> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-service</artifactId> + <version>${hive.version}</version> + </dependency> + <dependency> + <groupId>javassist</groupId> + <artifactId>javassist</artifactId> + <version>${javassist.version}</version> + </dependency> + <dependency> + <groupId>security_plugins.ranger-plugins-audit</groupId> + 
<artifactId>ranger-plugins-audit</artifactId> + <version>${project.version}</version> + </dependency> + <dependency> + <groupId>security_plugins.ranger-plugins-common</groupId> + <artifactId>ranger-plugins-common</artifactId> + <version>${project.version}</version> </dependency> <dependency> <groupId>junit</groupId> http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java ---------------------------------------------------------------------- diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java index ff34f4f..e551cb6 100644 --- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java +++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java @@ -190,11 +190,11 @@ public class HdfsClient extends BaseClient { } public static HashMap<String, Object> testConnection(String serviceName, - Map<String, String> connectionProperties) { + Map<String, String> configs) { HashMap<String, Object> responseData = new HashMap<String, Object>(); boolean connectivityStatus = false; - HdfsClient connectionObj = new HdfsClient(serviceName, connectionProperties); + HdfsClient connectionObj = new HdfsClient(serviceName, configs); if (connectionObj != null) { List<String> testResult = connectionObj.listFiles("/", null,null); if (testResult != null && testResult.size() != 0) { http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java ---------------------------------------------------------------------- diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java index e13ee9e..8311ec0 100644 --- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java +++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java @@ -36,7 +36,7 @@ public class HdfsConnectionMgr { protected Map<String, HdfsClient> hdfdsConnectionCache = null; protected Map<String, Boolean> repoConnectStatusMap = null; - private static Logger logger = Logger.getLogger(HdfsConnectionMgr.class); + private static Logger LOG = Logger.getLogger(HdfsConnectionMgr.class); public HdfsConnectionMgr(){ hdfdsConnectionCache = new HashMap<String, HdfsClient>(); @@ -54,7 +54,7 @@ public class HdfsConnectionMgr { .getServiceByName(serviceName) .getType(); } catch (Exception ex) { - logger.error("Service could not be found for the Service Name : " + serviceName , ex); + LOG.error("Service could not be found for the Service Name : " + serviceName , ex); } if (serviceType != null) { // get it from the cache @@ -72,7 +72,7 @@ public class HdfsConnectionMgr { try { hdfsClient = TimedEventUtil.timedTask(connectHDFS, 10, TimeUnit.SECONDS); } catch(Exception e){ - logger.error("Error establishing connection for HDFS repository : " + LOG.error("Error establishing connection for HDFS repository : " + serviceName, e); } @@ -88,7 +88,7 @@ public class HdfsConnectionMgr { try { hdfsClient = TimedEventUtil.timedTask(connectHDFS, 5, TimeUnit.SECONDS); } catch(Exception e){ - logger.error("Error establishing connection for HDFS repository : " + LOG.error("Error establishing connection for HDFS repository : " + serviceName + " using 
configuration : " + configs, e); } } @@ -103,7 +103,7 @@ public class HdfsConnectionMgr { } } } else { - logger.error("Serice not found with name "+serviceName, new Throwable()); + LOG.error("Serice not found with name "+serviceName, new Throwable()); } return hdfsClient; http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java ---------------------------------------------------------------------- diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java index b44c53a..7739e56 100644 --- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java +++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java @@ -32,25 +32,25 @@ import org.apache.ranger.plugin.util.TimedEventUtil; public class HdfsResourceMgr { - public static final String PATH = "path"; - public static final Logger logger = Logger.getLogger(HdfsResourceMgr.class); - + public static final Logger LOG = Logger.getLogger(HdfsResourceMgr.class); + public static final String PATH = "path"; + public static HashMap<String, Object> testConnection(String serviceName, Map<String, String> configs) throws Exception { HashMap<String, Object> ret = null; - if(logger.isDebugEnabled()) { - logger.debug("<== HdfsResourceMgr.testConnection ServiceName: "+ serviceName + "Configs" + configs ) ; + if(LOG.isDebugEnabled()) { + LOG.debug("<== HdfsResourceMgr.testConnection ServiceName: "+ serviceName + "Configs" + configs ) ; } try { ret = HdfsClient.testConnection(serviceName, configs); } catch (Exception e) { - logger.error("<== HdfsResourceMgr.testConnection Error: " + e) ; + LOG.error("<== HdfsResourceMgr.testConnection Error: " + e) ; throw e; } - if(logger.isDebugEnabled()) { - logger.debug("<== HdfsResourceMgr.HdfsResourceMgr Result : "+ ret ) ; + if(LOG.isDebugEnabled()) { + LOG.debug("<== HdfsResourceMgr.testConnection Result : "+ ret ) ; } return ret; } @@ -71,8 +71,8 @@ public class HdfsResourceMgr { if (serviceName != null && userInput != null) { try { - if(logger.isDebugEnabled()) { - logger.debug("<== HdfsResourceMgr.HdfsResourceMgr UserInput: "+ userInput + "configs: " + configs + "context: " + context) ; + if(LOG.isDebugEnabled()) { + LOG.debug("<== HdfsResourceMgr.getHdfsResources() UserInput: "+ userInput + "configs: " + configs + "context: " + context) ; } String wildCardToMatch; @@ -107,20 +107,20 @@ public class HdfsResourceMgr { }; resultList = TimedEventUtil.timedTask(callableObj, 5,TimeUnit.SECONDS); - if(logger.isDebugEnabled()) { - logger.debug("Resource dir : " + userInput + if(LOG.isDebugEnabled()) { + LOG.debug("Resource dir : " + userInput + " wild card to match : " + wildCardToMatch + "\n Matching resources : " + resultList); } } } catch (Exception e) { - logger.error("Unable to get hdfs resources.", e); + LOG.error("Unable to get hdfs resources.", e); throw e; } } - if(logger.isDebugEnabled()) { - logger.debug("<== HdfsResourceMgr.HdfsResourceMgr Result : "+ resultList ) ; + if(LOG.isDebugEnabled()) { + LOG.debug("<== HdfsResourceMgr.getHdfsResources() Result : "+ resultList ) ; } return resultList; } http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java ---------------------------------------------------------------------- diff --git 
a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java index dde8d7e..b19bd9b 100644 --- a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java +++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java @@ -39,7 +39,6 @@ import org.junit.Test; public class TestRangerServiceHdfs { - static ServiceStore svcStore = null; static final String sdName = "svcDef-Hdfs"; static final String serviceName = "Hdfsdev"; HashMap<String, Object> responseData = null; @@ -57,21 +56,12 @@ public class TestRangerServiceHdfs { buildHdfsConnectionConfig(); buildLookupContext(); - - svcStore = ServiceStoreFactory.instance().getServiceStore(); - + sd = new RangerServiceDef(sdName, "org.apache.ranger.service.hdfs.RangerServiceHdfs", "TestService", "test servicedef description", null, null, null, null, null); svc = new RangerService(sdName, serviceName, "unit test hdfs resource lookup and validateConfig",configs); svcHdfs = new RangerServiceHdfs(); svcHdfs.init(sd, svc); svcHdfs.init(); - cleanupBeforeTest(); - try { - svcStore.createServiceDef(sd); - svcStore.createService(svc); - } catch (Exception e) { - e.printStackTrace(); - } } @Test @@ -133,27 +123,7 @@ public class TestRangerServiceHdfs { lookupContext.setResources(resourceMap); } - public void cleanupBeforeTest() { - - try { - List<RangerService> services = svcStore.getServices(null); - for(RangerService service : services) { - if(service.getName().startsWith(serviceName)) { - svcStore.deleteService(service.getId()); - } - } - - List<RangerServiceDef> serviceDefs = svcStore.getServiceDefs(null); - for(RangerServiceDef serviceDef : serviceDefs) { - if(serviceDef.getName().startsWith(sdName)) { - svcStore.deleteServiceDef(serviceDef.getId()); - } - } - } catch (Exception e) { - e.printStackTrace(); - } - } - + @After public void tearDown() { sd = null; http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/pom.xml ---------------------------------------------------------------------- diff --git a/hive-agent/pom.xml b/hive-agent/pom.xml index 1b19025..3b3bc66 100644 --- a/hive-agent/pom.xml +++ b/hive-agent/pom.xml @@ -67,7 +67,6 @@ <version>${tez.version}</version> <optional>true</optional> </dependency> - <dependency> <groupId>commons-lang</groupId> <artifactId>commons-lang</artifactId> @@ -89,6 +88,11 @@ <version>${hive.version}</version> </dependency> <dependency> + <groupId>com.google.code.gson</groupId> + <artifactId>gson</artifactId> + <version>${gson.version}</version> + </dependency> + <dependency> <groupId>org.apache.hive</groupId> <artifactId>hive-metastore</artifactId> <version>${hive.version}</version> http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java new file mode 100644 index 0000000..4632b52 --- /dev/null +++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.ranger.services.hive; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.ranger.plugin.model.RangerService; +import org.apache.ranger.plugin.model.RangerServiceDef; +import org.apache.ranger.plugin.service.RangerBaseService; +import org.apache.ranger.plugin.service.ResourceLookupContext; +import org.apache.ranger.services.hive.client.HiveResourceMgr; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; + +public class RangerServiceHive extends RangerBaseService { + + private static final Log LOG = LogFactory.getLog(RangerServiceHive.class); + + RangerService service; + RangerServiceDef serviceDef; + Map<String, String> configs; + String serviceName; + + public RangerServiceHive() { + super(); + } + + @Override + public void init(RangerServiceDef serviceDef, RangerService service) { + super.init(serviceDef, service); + init(); + } + + @Override + public HashMap<String,Object> validateConfig() throws Exception { + HashMap<String, Object> ret = new HashMap<String, Object>(); + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHive.validateConfig Service: (" + service + " )"); + } + if ( configs != null) { + try { + ret = HiveResourceMgr.testConnection(service.getName(), service.getConfigs()); + } catch (Exception e) { + LOG.error("<== RangerServiceHive.validateConfig Error:" + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHive.validateConfig Response : (" + ret + " )"); + } + return ret; + } + + @Override + public List<String> lookupResource(ResourceLookupContext context) throws Exception { + + List<String> ret = new ArrayList<String>(); + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHive.lookupResource Context: (" + context + ")"); + } + if (context != null) { + try { + ret = HiveResourceMgr.getHiveResources(service.getName(),service.getConfigs(),context); + } catch (Exception e) { + LOG.error( "<==RangerServiceHive.lookupResource Error : " + e); + throw e; + } + } + if(LOG.isDebugEnabled()) { + LOG.debug("<== RangerServiceHive.lookupResource Response: (" + ret + ")"); + } + return ret; + } + + public void init() { + service = getService(); + serviceDef = getServiceDef(); + serviceName = service.getName(); + configs = service.getConfigs(); + } + +} + http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java ---------------------------------------------------------------------- diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java new file mode 100644 index 0000000..da08240 --- /dev/null +++ 
b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java @@ -0,0 +1,566 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + package org.apache.ranger.services.hive.client; + +import java.io.Closeable; +import java.security.PrivilegedAction; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLTimeoutException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import javax.security.auth.Subject; + +import org.apache.commons.io.FilenameUtils; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.ranger.plugin.client.BaseClient; +import org.apache.ranger.plugin.client.HadoopException; + +public class HiveClient extends BaseClient implements Closeable { + + private static final Log LOG = LogFactory.getLog(HiveClient.class) ; + + Connection con = null ; + boolean isKerberosAuth=false; + + + public HiveClient(String serviceName) { + super(serviceName) ; + initHive() ; + } + + public HiveClient(String serviceName,Map<String,String> connectionProp) { + super(serviceName,connectionProp) ; + initHive() ; + } + + public void initHive() { + isKerberosAuth = getConfigHolder().isKerberosAuthentication(); + if (isKerberosAuth) { + LOG.info("Secured Mode: JDBC Connection done with preAuthenticated Subject"); + Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() { + public Object run() { + initConnection(); + return null; + } + }) ; + } + else { + LOG.info("Since Password is NOT provided, Trying to use UnSecure client with username and password"); + final String userName = getConfigHolder().getUserName() ; + final String password = getConfigHolder().getPassword() ; + Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() { + public Object run() { + initConnection(userName,password); + return null; + } + }) ; + } + } + + public List<String> getDatabaseList(String databaseMatching, final List<String> databaseList){ + final String dbMatching = databaseMatching; + final List<String> dbList = databaseList; + List<String> dblist = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() { + public List<String> run() { + return getDBList(dbMatching,dbList); + } + }) ; + return dblist; + } + + private List<String> getDBList(String databaseMatching, List<String>dbList) { + List<String> ret = new ArrayList<String>() ; + String errMsg = " You can still save the repository and start creating " + + "policies, but you would not be able to use autocomplete for " + + "resource names. 
+        if (con != null) {
+            Statement stat = null;
+            ResultSet rs   = null;
+            String sql = "show databases";
+            if (databaseMatching != null && !databaseMatching.isEmpty()) {
+                sql = sql + " like \"" + databaseMatching + "\"";
+            }
+            try {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("==> HiveClient.getDBList() databaseMatching: " + databaseMatching + " excludeDbList: " + dbList);
+                }
+                stat = con.createStatement();
+                rs   = stat.executeQuery(sql);
+                while (rs.next()) {
+                    String dbName = rs.getString(1);
+                    if (dbList != null && dbList.contains(dbName)) {
+                        continue;
+                    }
+                    ret.add(dbName);
+                }
+            } catch (SQLTimeoutException sqlt) {
+                String msgDesc = "Timeout: unable to execute SQL [" + sql + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, sqlt);
+                hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + errMsg, null, null);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HiveClient.getDBList() error: " + sqlt);
+                }
+                throw hdpException;
+            } catch (SQLException sqle) {
+                String msgDesc = "Unable to execute SQL [" + sql + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, sqle);
+                hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + errMsg, null, null);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HiveClient.getDBList() error: " + sqle);
+                }
+                throw hdpException;
+            } finally {
+                close(rs);
+                close(stat);
+            }
+        }
+        return ret;
+    }
+
+    public List<String> getTableList(String tableNameMatching, List<String> databaseList, List<String> tblNameList) {
+        final String tblNameMatching = tableNameMatching;
+        final List<String> dbList  = databaseList;
+        final List<String> tblList = tblNameList;
+
+        List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
+            public List<String> run() {
+                return getTblList(tblNameMatching, dbList, tblList);
+            }
+        });
+        return tableList;
+    }
+
+    public List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) {
+        List<String> ret = new ArrayList<String>();
+        String errMsg = " You can still save the repository and start creating "
+                + "policies, but you would not be able to use autocomplete for "
+                + "resource names. Check xa_portal.log for more info.";
+        if (con != null) {
+            Statement stat = null;
+            ResultSet rs   = null;
+
+            String sql = null;
+
+            try {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("==> HiveClient.getTblList() tableNameMatching: " + tableNameMatching + " dbList: " + dbList + " tblList: " + tblList);
+                }
+                if (dbList != null && !dbList.isEmpty()) {
+                    for (String db : dbList) {
+                        sql = "use " + db;
+
+                        try {
+                            stat = con.createStatement();
+                            stat.execute(sql);
+                        } finally {
+                            close(stat);
+                        }
+
+                        sql = "show tables ";
+                        if (tableNameMatching != null && !tableNameMatching.isEmpty()) {
+                            sql = sql + " like \"" + tableNameMatching + "\"";
+                        }
+                        stat = con.createStatement();
+                        rs   = stat.executeQuery(sql);
+                        while (rs.next()) {
+                            String tblName = rs.getString(1);
+                            if (tblList != null && tblList.contains(tblName)) {
+                                continue;
+                            }
+                            ret.add(tblName);
+                        }
+                    }
+                }
+            } catch (SQLTimeoutException sqlt) {
+                String msgDesc = "Timeout: unable to execute SQL [" + sql + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, sqlt);
+                hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + errMsg, null, null);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HiveClient.getTblList() error: " + sqlt);
+                }
+                throw hdpException;
+            } catch (SQLException sqle) {
+                String msgDesc = "Unable to execute SQL [" + sql + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, sqle);
+                hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + errMsg, null, null);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HiveClient.getTblList() error: " + sqle);
+                }
+                throw hdpException;
+            } finally {
+                close(rs);
+                close(stat);
+            }
+        }
+        return ret;
+    }
+
+    public List<String> getViewList(String database, String viewNameMatching) {
+        // view lookup is not implemented yet; callers currently get null
+        List<String> ret = null;
+        return ret;
+    }
+
+    public List<String> getUDFList(String database, String udfMatching) {
+        // UDF lookup is not implemented yet; callers currently get null
+        List<String> ret = null;
+        return ret;
+    }
+
+    public List<String> getColumnList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) {
+        final String clmNameMatching = columnNameMatching;
+        final List<String> databaseList = dbList;
+        final List<String> tableList    = tblList;
+        final List<String> clmList      = colList;
+        List<String> columnList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
+            public List<String> run() {
+                return getClmList(clmNameMatching, databaseList, tableList, clmList);
+            }
+        });
+        return columnList;
+    }
+
+    public List<String> getClmList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) {
+        List<String> ret = new ArrayList<String>();
+        String errMsg = " You can still save the repository and start creating "
+                + "policies, but you would not be able to use autocomplete for "
+                + "resource names. Check xa_portal.log for more info.";
+        if (con != null) {
+
+            // the matching expression is a wildcard pattern (see FilenameUtils.wildcardMatch below), not a regex
+            String columnNameMatchingPattern = null;
+            if (columnNameMatching != null && !columnNameMatching.isEmpty()) {
+                columnNameMatchingPattern = columnNameMatching;
+            }
+
+            Statement stat = null;
+            ResultSet rs   = null;
+
+            String sql = null;
+
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("==> HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList: " + dbList + " tblList: " + tblList + " colList: " + colList);
+            }
+
+            if (dbList != null && !dbList.isEmpty() &&
+                    tblList != null && !tblList.isEmpty()) {
+                for (String db : dbList) {
+                    for (String tbl : tblList) {
+                        try {
+                            sql = "use " + db;
+
+                            try {
+                                stat = con.createStatement();
+                                stat.execute(sql);
+                            } finally {
+                                close(stat);
+                            }
+
+                            sql  = "describe " + tbl;
+                            stat = con.createStatement();
+                            rs   = stat.executeQuery(sql);
+                            while (rs.next()) {
+                                String columnName = rs.getString(1);
+                                if (colList != null && colList.contains(columnName)) {
+                                    continue;
+                                }
+                                if (columnNameMatchingPattern == null) {
+                                    ret.add(columnName);
+                                } else if (FilenameUtils.wildcardMatch(columnName, columnNameMatchingPattern)) {
+                                    ret.add(columnName);
+                                }
+                            }
+                        } catch (SQLTimeoutException sqlt) {
+                            String msgDesc = "Timeout: unable to execute SQL [" + sql + "].";
+                            HadoopException hdpException = new HadoopException(msgDesc, sqlt);
+                            hdpException.generateResponseDataMap(false, getMessage(sqlt), msgDesc + errMsg, null, null);
+                            if (LOG.isDebugEnabled()) {
+                                LOG.debug("<== HiveClient.getClmList() error: " + sqlt);
+                            }
+                            throw hdpException;
+                        } catch (SQLException sqle) {
+                            String msgDesc = "Unable to execute SQL [" + sql + "].";
+                            HadoopException hdpException = new HadoopException(msgDesc, sqle);
+                            hdpException.generateResponseDataMap(false, getMessage(sqle), msgDesc + errMsg, null, null);
+                            if (LOG.isDebugEnabled()) {
+                                LOG.debug("<== HiveClient.getClmList() error: " + sqle);
+                            }
+                            throw hdpException;
+                        } finally {
+                            close(rs);
+                            close(stat);
+                        }
+                    }
+                }
+            }
+        }
+        return ret;
+    }
+
+    public void close() {
+        Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>() {
+            public Void run() {
+                close(con);
+                return null;
+            }
+        });
+    }
+
+    private void close(Statement aStat) {
+        try {
+            if (aStat != null) {
+                aStat.close();
+            }
+        } catch (SQLException e) {
+            LOG.error("Unable to close SQL statement", e);
+        }
+    }
+
+    private void close(ResultSet aResultSet) {
+        try {
+            if (aResultSet != null) {
+                aResultSet.close();
+            }
+        } catch (SQLException e) {
+            LOG.error("Unable to close ResultSet", e);
+        }
+    }
+
+    private void close(Connection aCon) {
+        try {
+            if (aCon != null) {
+                aCon.close();
+            }
+        } catch (SQLException e) {
+            LOG.error("Unable to close SQL Connection", e);
+        }
+    }
+
+    private void initConnection() {
+        initConnection(null, null);
+    }
+
+    private void initConnection(String userName, String password) {
+        Properties prop = getConfigHolder().getRangerSection();
+        String driverClassName = prop.getProperty("jdbc.driverClassName");
+        String url             = prop.getProperty("jdbc.url");
+        String errMsg = " You can still save the repository and start creating "
+                + "policies, but you would not be able to use autocomplete for "
+                + "resource names. Check xa_portal.log for more info.";
+        if (driverClassName != null) {
+            try {
+                Driver driver = (Driver) Class.forName(driverClassName).newInstance();
+                DriverManager.registerDriver(driver);
+            } catch (SQLException e) {
+                String msgDesc = "initConnection: caught SQLException while registering "
+                        + "the Hive driver, so unable to connect to the Hive Thrift server instance.";
+                HadoopException hdpException = new HadoopException(msgDesc, e);
+                hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
+                throw hdpException;
+            } catch (IllegalAccessException ilae) {
+                String msgDesc = "initConnection: the driver class or its nullary constructor is not accessible, "
+                        + "so unable to connect to the Hive Thrift server instance.";
+                HadoopException hdpException = new HadoopException(msgDesc, ilae);
+                hdpException.generateResponseDataMap(false, getMessage(ilae), msgDesc + errMsg, null, null);
+                throw hdpException;
+            } catch (InstantiationException ie) {
+                String msgDesc = "initConnection: the driver class has no nullary constructor, or the instantiation "
+                        + "failed for some other reason, so unable to connect to the Hive Thrift server instance.";
+                HadoopException hdpException = new HadoopException(msgDesc, ie);
+                hdpException.generateResponseDataMap(false, getMessage(ie), msgDesc + errMsg, null, null);
+                throw hdpException;
+            } catch (ExceptionInInitializerError eie) {
+                String msgDesc = "initConnection: got ExceptionInInitializerError; static initialization of the "
+                        + "driver class failed, so unable to connect to the Hive Thrift server instance.";
+                HadoopException hdpException = new HadoopException(msgDesc, eie);
+                hdpException.generateResponseDataMap(false, getMessage(eie), msgDesc + errMsg, null, null);
+                throw hdpException;
+            } catch (SecurityException se) {
+                String msgDesc = "initConnection: unable to connect to the Hive Thrift server instance; the caller's "
+                        + "class loader is not the same as, or an ancestor of, the class loader of the driver class, "
+                        + "and invocation of s.checkPackageAccess() denies access to the package of that class.";
+                HadoopException hdpException = new HadoopException(msgDesc, se);
+                hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
+                throw hdpException;
+            } catch (Throwable t) {
+                String msgDesc = "initConnection: unable to connect to the Hive Thrift server instance; "
+                        + "please provide a valid value for the field {jdbc.driverClassName}.";
+                HadoopException hdpException = new HadoopException(msgDesc, t);
+                hdpException.generateResponseDataMap(false, getMessage(t), msgDesc + errMsg, null, "jdbc.driverClassName");
+                throw hdpException;
+            }
+        }
+
+        try {
+            if (userName == null && password == null) {
+                con = DriverManager.getConnection(url);
+            } else {
+                con = DriverManager.getConnection(url, userName, password);
+            }
+        } catch (SQLException e) {
+            String msgDesc = "Unable to connect to the Hive Thrift server instance.";
+            HadoopException hdpException = new HadoopException(msgDesc, e);
+            hdpException.generateResponseDataMap(false, getMessage(e), msgDesc + errMsg, null, null);
+            throw hdpException;
+        } catch (SecurityException se) {
+            String msgDesc = "Unable to connect to the Hive Thrift server instance.";
+            HadoopException hdpException = new HadoopException(msgDesc, se);
+            hdpException.generateResponseDataMap(false, getMessage(se), msgDesc + errMsg, null, null);
+            throw hdpException;
+        }
+    }
+
+    public static void main(String[] args) {
+        HiveClient hc = null;
+
+        if (args.length == 0) {
+            System.err.println("USAGE: java " + HiveClient.class.getName() + " dataSourceName [databaseName [tableName [columnName]]]");
+            System.exit(1);
+        }
+
+        try {
+            hc = new HiveClient(args[0]);
+
+            if (args.length == 2) {
+                List<String> dbList = hc.getDatabaseList(args[1], null);
+                if (dbList.size() == 0) {
+                    System.out.println("No database found with db filter [" + args[1] + "]");
+                } else {
+                    for (String str : dbList) {
+                        System.out.println("database: " + str);
+                    }
+                }
+            } else if (args.length == 3) {
+                List<String> tableList = hc.getTableList(args[2], Arrays.asList(args[1]), null);
+                if (tableList.size() == 0) {
+                    System.out.println("No tables found under database [" + args[1] + "] with table filter [" + args[2] + "]");
+                } else {
+                    for (String str : tableList) {
+                        System.out.println("Table: " + str);
+                    }
+                }
+            } else if (args.length == 4) {
+                List<String> columnList = hc.getColumnList(args[3], Arrays.asList(args[1]), Arrays.asList(args[2]), null);
+                if (columnList.size() == 0) {
+                    System.out.println("No columns found for db: [" + args[1] + "], table: [" + args[2] + "], with column filter [" + args[3] + "]");
+                } else {
+                    for (String str : columnList) {
+                        System.out.println("Column: " + str);
+                    }
+                }
+            }
+        } finally {
+            if (hc != null) {
+                hc.close();
+            }
+        }
+    }
+
+    public static HashMap<String, Object> testConnection(String serviceName,
+            Map<String, String> connectionProperties) {
+
+        HashMap<String, Object> responseData = new HashMap<String, Object>();
+        boolean connectivityStatus = false;
+        String errMsg = " You can still save the repository and start creating "
+                + "policies, but you would not be able to use autocomplete for "
+                + "resource names. Check xa_portal.log for more info.";
+
+        HiveClient connectionObj = new HiveClient(serviceName, connectionProperties);
+        List<String> testResult  = connectionObj.getDatabaseList("*", null);
+        if (testResult != null && testResult.size() != 0) {
+            connectivityStatus = true;
+        }
+        if (connectivityStatus) {
+            String successMsg = "TestConnection Successful";
+            generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+                    null, null, responseData);
+        } else {
+            String failureMsg = "Unable to retrieve any databases using given parameters.";
+            generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+                    null, null, responseData);
+        }
+
+        connectionObj.close();
+        return responseData;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/5a713177/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveConnectionMgr.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveConnectionMgr.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveConnectionMgr.java
new file mode 100644
index 0000000..c355435
--- /dev/null
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveConnectionMgr.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.services.hive.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.store.ServiceStoreFactory;
+import org.apache.ranger.plugin.util.TimedEventUtil;
+
+public class HiveConnectionMgr {
+
+    private static final Logger LOG = Logger.getLogger(HiveConnectionMgr.class);
+
+    protected HashMap<String, HiveClient> hiveConnectionCache;
+    protected HashMap<String, Boolean>    repoConnectStatusMap;
+
+    public HiveConnectionMgr() {
+        hiveConnectionCache  = new HashMap<String, HiveClient>();
+        repoConnectStatusMap = new HashMap<String, Boolean>();
+    }
+
+    public HiveClient getHiveConnection(final String serviceName, final Map<String, String> configs) {
+        HiveClient hiveClient  = null;
+        String     serviceType = null;
+        try {
+            serviceType = ServiceStoreFactory
+                    .instance()
+                    .getServiceStore()
+                    .getServiceByName(serviceName)
+                    .getType();
+        } catch (Exception ex) {
+            LOG.error("Service could not be found for the service name: " + serviceName, ex);
+        }
+
+        if (serviceType != null) {
+            // look it up in the cache; the cache is keyed by service name
+            synchronized (hiveConnectionCache) {
+                hiveClient = hiveConnectionCache.get(serviceName);
+                if (hiveClient == null) {
+                    if (configs != null) {
+                        final Callable<HiveClient> connectHive = new Callable<HiveClient>() {
+                            @Override
+                            public HiveClient call() throws Exception {
+                                return new HiveClient(serviceName, configs);
+                            }
+                        };
+                        try {
+                            hiveClient = TimedEventUtil.timedTask(connectHive, 5, TimeUnit.SECONDS);
+                        } catch (Exception e) {
+                            LOG.error("Error connecting to the Hive repository: " + serviceName, e);
+                        }
+                        hiveConnectionCache.put(serviceName, hiveClient);
+                        repoConnectStatusMap.put(serviceName, true);
+                    } else {
+                        LOG.error("Connection config is not defined for service: " + serviceName, new Throwable());
+                    }
+                } else {
+                    try {
+                        // validate the cached connection before handing it out
+                        hiveClient.getDatabaseList("*", null);
+                    } catch (Exception e) {
+                        // stale connection: drop it from the cache and rebuild
+                        hiveConnectionCache.remove(serviceName);
+                        hiveClient = getHiveConnection(serviceName, configs);
+                    }
+                }
+            }
+        } else {
+            LOG.error("Service not found with name " + serviceName, new Throwable());
+        }
+        return hiveClient;
+    }
+}
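As a quick standalone check of the new Hive lookup client, something along the following lines should work; this is a minimal sketch, not part of the commit. The jdbc.driverClassName and jdbc.url keys mirror what HiveClient.initConnection() reads from the Ranger section of the service config; the username and password key names, the service name, and the URL are assumptions for illustration only.

import java.util.HashMap;
import java.util.Map;

import org.apache.ranger.services.hive.client.HiveClient;

public class HiveClientSmokeTest {
    public static void main(String[] args) {
        Map<String, String> configs = new HashMap<String, String>();
        configs.put("jdbc.driverClassName", "org.apache.hive.jdbc.HiveDriver"); // assumed HiveServer2 driver
        configs.put("jdbc.url", "jdbc:hive2://localhost:10000/default");        // assumed local HiveServer2
        configs.put("username", "hiveuser");                                    // assumed config key
        configs.put("password", "hivepassword");                                // assumed config key

        // testConnection() builds the same response map that the service layer
        // returns from validateConfig(); a "TestConnection Successful" message
        // means at least one database could be listed via "show databases".
        HashMap<String, Object> response = HiveClient.testConnection("dev_hive", configs);
        System.out.println(response);
    }
}

This exercises the same path that RangerServiceHive.validateConfig() reaches through HiveResourceMgr.testConnection().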
