RANGER-853 Remove the unused lookup-client project, as its contents were
moved into the individual plugin projects as of the ranger-0.5 release


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/46895de3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/46895de3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/46895de3

Branch: refs/heads/master
Commit: 46895de3af07d3934fd776b8eedc5bc031fdfa3e
Parents: 073577d
Author: Alok Lal <[email protected]>
Authored: Fri Feb 19 13:37:32 2016 -0800
Committer: Alok Lal <[email protected]>
Committed: Fri Feb 19 14:51:19 2016 -0800

----------------------------------------------------------------------
 lookup-client/.gitignore                        |   4 -
 lookup-client/pom.xml                           |  96 ----
 .../security/KrbPasswordSaverLoginModule.java   |  77 ---
 .../hadoop/security/SecureClientLogin.java      | 133 -----
 .../apache/ranger/hadoop/client/HadoopFS.java   | 193 -------
 .../ranger/hadoop/client/HadoopFSTester.java    |  76 ---
 .../ranger/hadoop/client/config/BaseClient.java | 163 ------
 .../client/config/HadoopConfigHolder.java       | 345 -------------
 .../client/exceptions/HadoopException.java      |  60 ---
 .../apache/ranger/hbase/client/HBaseClient.java | 403 ---------------
 .../ranger/hbase/client/HBaseClientTester.java  |  92 ----
 .../apache/ranger/hive/client/HiveClient.java   | 510 -------------------
 .../ranger/hive/client/HiveClientTester.java    |  97 ----
 .../apache/ranger/knox/client/KnoxClient.java   | 386 --------------
 .../ranger/knox/client/KnoxClientTest.java      |  42 --
 .../ranger/knox/client/RangerConstants.java     |  42 --
 .../apache/ranger/storm/client/StormClient.java | 395 --------------
 .../storm/client/json/model/Topology.java       |  47 --
 .../client/json/model/TopologyListResponse.java |  38 --
 lookup-client/src/scripts/log4j.xml             |  43 --
 lookup-client/src/scripts/run-audit-test.sh     |  31 --
 lookup-client/src/scripts/run-hadoop-client.sh  |  23 -
 lookup-client/src/scripts/run-hbase-client.sh   |  29 --
 lookup-client/src/scripts/run-hive-client.sh    |  29 --
 .../src/scripts/xasecure-audit.properties       |  55 --
 25 files changed, 3409 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/.gitignore
----------------------------------------------------------------------
diff --git a/lookup-client/.gitignore b/lookup-client/.gitignore
deleted file mode 100644
index 8604ba3..0000000
--- a/lookup-client/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-/target/
-/bin/
-/target
-.settings/

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/pom.xml
----------------------------------------------------------------------
diff --git a/lookup-client/pom.xml b/lookup-client/pom.xml
deleted file mode 100644
index 6341bba..0000000
--- a/lookup-client/pom.xml
+++ /dev/null
@@ -1,96 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <artifactId>lookup-client</artifactId>
-    <name>Resource Lookup API Implementation</name>
-    <description>Resource Lookup API Implementation</description>
-    <packaging>jar</packaging>
-    <parent>
-        <groupId>org.apache.ranger</groupId>
-        <artifactId>ranger</artifactId>
-        <version>0.6.0</version>
-        <relativePath>..</relativePath>
-    </parent>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.calcite</groupId>
-            <artifactId>calcite-core</artifactId>
-            <version>${calcite.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.calcite</groupId>
-            <artifactId>calcite-avatica</artifactId>
-            <version>${calcite.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tez</groupId>
-            <artifactId>tez-api</artifactId>
-            <version>${tez.version}</version>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tez</groupId>
-            <artifactId>tez-runtime-library</artifactId>
-            <version>${tez.version}</version>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tez</groupId>
-            <artifactId>tez-runtime-internals</artifactId>
-            <version>${tez.version}</version>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tez</groupId>
-            <artifactId>tez-mapreduce</artifactId>
-            <version>${tez.version}</version>
-            <optional>true</optional>
-        </dependency>
-        <dependency>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-            <version>${commons.logging.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <version>${hadoop.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-server</artifactId>
-            <version>${hbase.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-common</artifactId>
-            <version>${hive.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-service</artifactId>
-            <version>${hive.version}</version>
-        </dependency>
-    </dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java b/lookup-client/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
deleted file mode 100644
index 6dbbb13..0000000
--- a/lookup-client/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.hadoop.security;
-
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.login.LoginException;
-import javax.security.auth.spi.LoginModule;
-
-public class KrbPasswordSaverLoginModule implements LoginModule {
-       
-    public static final String USERNAME_PARAM = "javax.security.auth.login.name";
-    public static final String PASSWORD_PARAM = "javax.security.auth.login.password";
-
-       @SuppressWarnings("rawtypes")
-       private Map sharedState = null ;
-       
-       public KrbPasswordSaverLoginModule() {
-       }
-
-       @Override
-       public boolean abort() throws LoginException {
-               return true;
-       }
-
-       @Override
-       public boolean commit() throws LoginException {
-               return true;
-       }
-
-       @SuppressWarnings("unchecked")
-       @Override
-       public void initialize(Subject subject, CallbackHandler callbackhandler, Map<String, ?> sharedMap, Map<String, ?> options) {
-               
-               this.sharedState = sharedMap ;
-               
-               String userName = (options != null) ? (String)options.get(USERNAME_PARAM) : null ;
-               if (userName != null) {
-                       this.sharedState.put(USERNAME_PARAM,userName) ;
-               }
-               String password = (options != null) ? (String)options.get(PASSWORD_PARAM) : null ;
-               
-               if (password != null) {
-                       this.sharedState.put(PASSWORD_PARAM,password.toCharArray()) ;
-               }
-       }
-
-       @Override
-       public boolean login() throws LoginException {
-               return true;
-       }
-
-       @Override
-       public boolean logout() throws LoginException {
-               return true;
-       }
-
-}
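
Note: the module above never authenticates on its own; it only copies the
configured username and password into the JAAS shared state (under the
standard javax.security.auth.login.name/password keys) so that a
Krb5LoginModule running with useFirstPass=true can consume them. The
password-based login that SecureClientLoginConfiguration (next file) builds
programmatically is roughly equivalent to this JAAS configuration file,
shown only as an illustrative sketch with a placeholder principal and
password:

    hadoop-keytab-kerberos {
       org.apache.hadoop.security.KrbPasswordSaverLoginModule required
          javax.security.auth.login.name="rangerlookup@EXAMPLE.COM"
          javax.security.auth.login.password="changeit";
       com.sun.security.auth.module.Krb5LoginModule required
          useKeyTab=false
          useFirstPass=true
          tryFirstPass=false
          doNotPrompt=false
          storeKey=true
          refreshKrb5Config=true;
    };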

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/hadoop/security/SecureClientLogin.java b/lookup-client/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
deleted file mode 100644
index 2e998ea..0000000
--- a/lookup-client/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.security;
-
-import java.io.IOException;
-import java.security.Principal;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
-import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-
-public class SecureClientLogin {
-
-       public synchronized static Subject loginUserFromKeytab(String user, String path) throws IOException {
-               try {
-                       Subject subject = new Subject();
-                       SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(true, user, path);
-                       LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf);
-                       subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login));
-                       login.login();
-                       return login.getSubject();
-               } catch (LoginException le) {
-                       throw new IOException("Login failure for " + user + " from keytab " + path, le);
-               }
-       }
-
-       public synchronized static Subject loginUserWithPassword(String user, String password) throws IOException {
-               String tmpPass = password;
-               try {
-                       Subject subject = new Subject();
-                       SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(false, user, password);
-                       LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf);
-                       subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login));
-                       login.login();
-                       return login.getSubject();
-               } catch (LoginException le) {
-                       throw new IOException("Login failure for " + user + " using password " + tmpPass.replaceAll(".","*"), le);
-               }
-       }
-
-       public synchronized static Subject login(String user) throws IOException {
-               Subject subject = new Subject();
-               subject.getPrincipals().add(new User(user));
-               return subject;
-       }
-
-       public static Set<Principal> getUserPrincipals(Subject aSubject) {
-               if (aSubject != null) {
-                       Set<User> list = aSubject.getPrincipals(User.class);
-                       if (list != null) {
-                               Set<Principal> ret = new HashSet<Principal>();
-                               for (User a : list) {
-                                       ret.add(a);
-                               }
-                               return ret;
-                       } else {
-                               return null;
-                       }
-               } else {
-                       return null;
-               }
-       }
-       
-       public static Principal createUserPrincipal(String aLoginName) {
-               return new User(aLoginName) ;
-       }
-
-}
-
-class SecureClientLoginConfiguration extends javax.security.auth.login.Configuration {
-
-       private Map<String, String> kerberosOptions = new HashMap<String, String>();
-       private boolean usePassword = false ;
-
-       public SecureClientLoginConfiguration(boolean useKeyTab, String principal, String credential) {
-               kerberosOptions.put("principal", principal);
-               kerberosOptions.put("debug", "false");
-               if (useKeyTab) {
-                       kerberosOptions.put("useKeyTab", "true");
-                       kerberosOptions.put("keyTab", credential);
-                       kerberosOptions.put("doNotPrompt", "true");
-               } else {
-                       usePassword = true ;
-                       kerberosOptions.put("useKeyTab", "false");
-                       kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, principal);
-                       kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, credential);
-                       kerberosOptions.put("doNotPrompt", "false");
-                       kerberosOptions.put("useFirstPass", "true");
-                       kerberosOptions.put("tryFirstPass","false") ;
-               }
-               kerberosOptions.put("storeKey", "true");
-               kerberosOptions.put("refreshKrb5Config", "true");
-       }
-
-       @Override
-       public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
-               AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
-               if (usePassword) {
-                       AppConfigurationEntry KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
-                       return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
-               }
-               else {
-                       return new AppConfigurationEntry[] { KEYTAB_KERBEROS_LOGIN };
-               }
-       }
-       
-
-}
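
Note: callers pair these helpers with Subject.doAs to run Hadoop client
calls under the authenticated identity, exactly as BaseClient and HadoopFS
below do. A minimal usage sketch (principal and keytab path are
placeholders):

    import java.security.PrivilegedAction;
    import javax.security.auth.Subject;
    import org.apache.hadoop.security.SecureClientLogin;

    public class LoginExample {
        public static void main(String[] args) throws Exception {
            // Placeholder principal and keytab; real values come from repository config.
            Subject subject = SecureClientLogin.loginUserFromKeytab(
                    "rangerlookup@EXAMPLE.COM",
                    "/etc/security/keytabs/rangerlookup.keytab");
            String result = Subject.doAs(subject, new PrivilegedAction<String>() {
                @Override
                public String run() {
                    // Work done here runs while the subject's credentials are in effect.
                    return System.getProperty("user.name");
                }
            });
            System.out.println("Ran as: " + result);
        }
    }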

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
deleted file mode 100644
index e744a1d..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFS.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client;
-
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.UnknownHostException;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.hadoop.client.config.BaseClient;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-public class HadoopFS extends BaseClient {
-
-       private static final Log LOG = LogFactory.getLog(HadoopFS.class) ;
-
-       public HadoopFS(String dataSource) {
-               super(dataSource) ;
-       }
-       
-       public HadoopFS(String dataSource, HashMap<String,String> connectionProperties) {
-               super(dataSource,connectionProperties) ;
-       }
-       
-       private List<String> listFilesInternal(String baseDir, String fileMatching) {
-               List<String> fileList = new ArrayList<String>() ;
-               ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               try {
-                       Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-                       String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/")) ;
-                       String filterRegEx = null;
-                       if (fileMatching != null && fileMatching.trim().length() > 0) {
-                               filterRegEx = fileMatching.trim() ;
-                       }
-                       
-                       Configuration conf = new Configuration() ;
-                       UserGroupInformation.setConfiguration(conf);
-                       
-                       FileSystem fs = null ;
-                       try {
-                               fs = FileSystem.get(conf) ;
-                               
-                               FileStatus[] fileStats = fs.listStatus(new Path(baseDir)) ;
-                               if (fileStats != null) {
-                                       for(FileStatus stat : fileStats) {
-                                               Path path = stat.getPath() ;
-                                               String pathComponent = path.getName() ;
-                                               if (filterRegEx == null) {
-                                                       fileList.add(dirPrefix + pathComponent) ;
-                                               }
-                                               else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
-                                                       fileList.add(dirPrefix + pathComponent) ;
-                                               }
-                                       }
-                               }
-                       } catch (UnknownHostException uhe) {
-                               String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
-                                               + " of Hadoop environment [" + getDataSource() + "].";
-                               HadoopException hdpException = new HadoopException(msgDesc, uhe);
-                               hdpException.generateResponseDataMap(false, getMessage(uhe),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (FileNotFoundException fne) {
-                               String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
-                                               + "of Hadoop environment [" + getDataSource() + "].";
-                               HadoopException hdpException = new HadoopException(msgDesc, fne);
-                               hdpException.generateResponseDataMap(false, getMessage(fne),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       }
-               } catch (IOException ioe) {
-                       String msgDesc = "listFilesInternal: Unable to get listing of files for directory "
-                                       + baseDir
-                                       + "] from Hadoop environment ["
-                                       + getDataSource()
-                                       + "].";
-                       HadoopException hdpException = new HadoopException(msgDesc, ioe);
-                       hdpException.generateResponseDataMap(false, getMessage(ioe),
-                                       msgDesc + errMsg, null, null);
-                       throw hdpException;
-
-               } catch (IllegalArgumentException iae) {
-                       String msgDesc = "Unable to get listing of files for directory ["
-                                       + baseDir + "] from Hadoop environment [" + getDataSource()
-                                       + "].";
-                       HadoopException hdpException = new HadoopException(msgDesc, iae);
-                       hdpException.generateResponseDataMap(false, getMessage(iae),
-                                       msgDesc + errMsg, null, null);
-                       throw hdpException;
-               }
-               finally {
-                       Thread.currentThread().setContextClassLoader(prevCl);
-               }
-               return fileList ;
-       }
-
-       
-       public List<String> listFiles(final String baseDir, final String fileMatching) {
-
-               PrivilegedAction<List<String>> action = new PrivilegedAction<List<String>>() {
-                       @Override
-                       public List<String> run() {
-                               return listFilesInternal(baseDir, fileMatching) ;
-                       }
-                       
-               };
-               return Subject.doAs(getLoginSubject(),action) ;
-       }
-       
-       public static final void main(String[] args) {
-               
-               if (args.length < 2) {
-                       System.err.println("USAGE: java " + HadoopFS.class.getName() + " repositoryName  basedirectory  [filenameToMatch]") ;
-                       System.exit(1) ;
-               }
-               
-               String repositoryName = args[0] ;
-               String baseDir = args[1] ;
-               String fileNameToMatch = (args.length == 2 ? null : args[2]) ;
-               
-               HadoopFS fs = new HadoopFS(repositoryName) ;
-               List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
-               if (fsList != null && fsList.size() > 0) {
-                       for(String s : fsList) {
-                               System.out.println(s) ;
-                       }
-               }
-               else {
-                       System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "]  in repository [" + repositoryName + "]") ;
-               }
-       }
-
-       public static HashMap<String, Object> testConnection(String dataSource,
-                       HashMap<String, String> connectionProperties) {
-
-               HashMap<String, Object> responseData = new HashMap<String, Object>();
-               boolean connectivityStatus = false;
-               HadoopFS connectionObj = new HadoopFS(dataSource, connectionProperties);
-               if (connectionObj != null) {
-                       List<String> testResult = connectionObj.listFiles("/", null);
-                       if (testResult != null && testResult.size() != 0) {
-                               connectivityStatus = true;
-                       }
-               }
-               if (connectivityStatus) {
-                       String successMsg = "TestConnection Successful";
-                       generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-                                       null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any files using given parameters, "
-                                       + "You can still save the repository and start creating policies, "
-                                       + "but you would not be able to use autocomplete for resource names. "
-                                       + "Check xa_portal.log for more info.";
-                       generateResponseDataMap(connectivityStatus, failureMsg, failureMsg,
-                                       null, null, responseData);
-               }
-               return responseData;
-       }
-
-}
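
Note: the main() above is a thin CLI over listFiles(); used from code, the
same lookup looks like this (the repository name is a placeholder and must
already be defined in datasource.properties):

    import java.util.List;
    import org.apache.ranger.hadoop.client.HadoopFS;

    public class ListExample {
        public static void main(String[] args) {
            HadoopFS fs = new HadoopFS("hadoopdev");  // placeholder repository name
            // The second argument is a wildcard, matched via FilenameUtils.wildcardMatch.
            List<String> files = fs.listFiles("/tmp", "*.log");
            for (String f : files) {
                System.out.println(f);
            }
        }
    }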

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
deleted file mode 100644
index dc73801..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/HadoopFSTester.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-public class HadoopFSTester {
-
-       public static void main(String[] args) throws Throwable {
-               if (args.length < 3) {
-                       System.err.println("USAGE: java " + HadoopFS.class.getName() + " repositoryName propertyFile basedirectory  [filenameToMatch]") ;
-                       System.exit(1) ;
-               }
-               
-               String repositoryName = args[0] ;
-               String propFile = args[1] ;
-               String baseDir = args[2] ;
-               String fileNameToMatch = (args.length == 3 ? null : args[3]) ;
-
-               Properties conf = new Properties() ;
-               InputStream in = HadoopFSTester.class.getClassLoader().getResourceAsStream(propFile) ;
-               try {
-                       conf.load(in);
-               }
-               finally {
-                       if (in != null) {
-                               try {
-                                       in.close() ;
-                               }
-                               catch(IOException ioe) {
-                                       // Ignore IOE when closing stream
-                               }
-                       }
-               }
-               
-               HashMap<String,String> prop = new HashMap<String,String>() ;
-               for(Object key : conf.keySet()) {
-                       Object val = conf.get(key) ;
-                       prop.put((String)key, (String)val) ;
-               }
-               
-               HadoopFS fs = new HadoopFS(repositoryName, prop) ;
-               List<String> fsList = fs.listFiles(baseDir, fileNameToMatch) ;
-               if (fsList != null && fsList.size() > 0) {
-                       for(String s : fsList) {
-                               System.out.println(s) ;
-                       }
-               }
-               else {
-                       System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "]  in repository [" + repositoryName + "]") ;
-               }
-
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
deleted file mode 100644
index e7775d8..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/BaseClient.java
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client.config;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.security.SecureClientLogin;
-import org.apache.ranger.hadoop.client.HadoopFS;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-public abstract class BaseClient {
-       private static final Log LOG = LogFactory.getLog(HadoopFS.class) ;
-       
-       private String dataSource ;
-       private Subject loginSubject ;
-       private HadoopConfigHolder configHolder;
-       
-       protected HashMap<String,String> connectionProperties ;
-       
-       public BaseClient(String dataSource) {
-               this.dataSource = dataSource ;
-               init() ;
-               login() ;
-       }
-       
-       public BaseClient(String dataSource, HashMap<String,String> connectionProperties) {
-               this.dataSource = dataSource ;
-               this.connectionProperties = connectionProperties ;
-               init() ;
-               login() ;
-       }
-       
-       
-       private void init() {
-               if (connectionProperties == null) {
-                       configHolder = HadoopConfigHolder.getInstance(dataSource) ;
-               }
-               else {
-                       configHolder = HadoopConfigHolder.getInstance(dataSource,connectionProperties) ;
-               }
-       }
-       
-       
-       protected void login() {
-               ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               try {
-                       Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
-                       String userName = configHolder.getUserName() ;
-                       if (userName == null) {
-                               String msgDesc = "Unable to find login username for hadoop environment, ["
-                                               + dataSource + "]";
-                               HadoopException hdpException = new HadoopException(msgDesc);
-                               hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
-                                               null, null);
-
-                               throw hdpException;
-                       }
-                       String keyTabFile = configHolder.getKeyTabFile() ;
-                       if (keyTabFile != null) {
-                               if ( configHolder.isKerberosAuthentication() ) {
-                                       LOG.info("Init Login: security enabled, using username/keytab");
-                                       loginSubject = SecureClientLogin.loginUserFromKeytab(userName, keyTabFile) ;
-                               }
-                               else {
-                                       LOG.info("Init Login: using username");
-                                       loginSubject = SecureClientLogin.login(userName) ;
-                               }
-                       }
-                       else {
-                               String password = configHolder.getPassword() ;
-                               if ( configHolder.isKerberosAuthentication() ) {
-                                       LOG.info("Init Login: using username/password");
-                                       loginSubject = SecureClientLogin.loginUserWithPassword(userName, password) ;
-                               }
-                               else {
-                                       LOG.info("Init Login: security not enabled, using username");
-                                       loginSubject = SecureClientLogin.login(userName) ;
-                               }
-                       }
-               } catch (IOException ioe) {
-                       String msgDesc = "Unable to login to Hadoop environment ["
-                                       + dataSource + "]";
-
-                       HadoopException hdpException = new HadoopException(msgDesc, ioe);
-                       hdpException.generateResponseDataMap(false, getMessage(ioe),
-                                       msgDesc + errMsg, null, null);
-                       throw hdpException;
-               } catch (SecurityException se) {
-                       String msgDesc = "Unable to login to Hadoop environment ["
-                                       + dataSource + "]";
-                       HadoopException hdpException = new HadoopException(msgDesc, se);
-                       hdpException.generateResponseDataMap(false, getMessage(se),
-                                       msgDesc + errMsg, null, null);
-                       throw hdpException;
-               } finally {
-                       Thread.currentThread().setContextClassLoader(prevCl);
-               }
-       }
-       
-       public String getDataSource() {
-               return dataSource ;
-       }
-
-       protected Subject getLoginSubject() {
-               return loginSubject;
-       }
-
-       protected HadoopConfigHolder getConfigHolder() {
-               return configHolder;
-       }
-       
-       public static void generateResponseDataMap(boolean connectivityStatus,
-                       String message, String description, Long objectId,
-                       String fieldName, HashMap<String, Object> responseData) {
-               responseData.put("connectivityStatus", connectivityStatus);
-               responseData.put("message", message);
-               responseData.put("description", description);
-               responseData.put("objectId", objectId);
-               responseData.put("fieldName", fieldName);
-       }
-
-       public static String getMessage(Throwable excp) {
-               List<String> errList = new ArrayList<String>();
-               while (excp != null) {
-                       if (!errList.contains(excp.getMessage() + ". \n")) {
-                               if (excp.getMessage() != null && !(excp.getMessage().equalsIgnoreCase(""))) {
-                                       errList.add(excp.getMessage() + ". \n");
-                               }
-                       }
-                       excp = excp.getCause();
-               }
-               return StringUtils.join(errList, "");
-       }
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
deleted file mode 100644
index f9b3eee..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/config/HadoopConfigHolder.java
+++ /dev/null
@@ -1,345 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client.config;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-public class HadoopConfigHolder  {
-       private static final Log LOG = LogFactory.getLog(HadoopConfigHolder.class) ;
-       public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties" ;
-       public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties" ;
-       public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties" ;
-       public static final String DEFAULT_RESOURCE_NAME = "core-site.xml" ;
-       public static final String RANGER_SECTION_NAME = "xalogin.xml" ;
-       public static final String RANGER_LOGIN_USER_NAME_PROP = "username" ;
-       public static final String RANGER_LOGIN_KEYTAB_FILE_PROP = "keytabfile" ;
-       public static final String RANGER_LOGIN_PASSWORD = "password" ;
-       public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
-       public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
-       public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
-       
-
-       private static boolean initialized = false ;
-       private static HashMap<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>() ;
-       private static Properties globalLoginProp = new Properties() ;
-       private static HashMap<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>() ;
-       private static Properties resourcemapProperties = null ;
-       
-       
-       private String datasourceName ;
-       private String userName ;
-       private String keyTabFile ;
-       private String password ;
-       private boolean isKerberosAuth ;
-       
-       private HashMap<String,String>  connectionProperties;
-       
-       public static HadoopConfigHolder getInstance(String aDatasourceName) {
-               HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-               if (ret == null) {
-                       synchronized(HadoopConfigHolder.class) {
-                               HadoopConfigHolder temp = ret ;
-                               if (temp == null) {
-                                       ret = new HadoopConfigHolder(aDatasourceName) ;
-                                       dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-                               }
-                       }
-               }
-               return ret ;
-       }
-       
-       public static HadoopConfigHolder getInstance(String aDatasourceName, HashMap<String,String> connectionProperties) {
-               HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
-               if (ret == null) {
-                       synchronized(HadoopConfigHolder.class) {
-                               HadoopConfigHolder temp = ret ;
-                               if (temp == null) {
-                                       ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
-                                       dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-                               }
-                       }
-               }
-               else {
-                       if (connectionProperties !=null  &&  !connectionProperties.equals(ret.connectionProperties)) {
-                               ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
-                               dataSource2HadoopConfigHolder.remove(aDatasourceName) ;
-                               dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
-                       }
-               }
-               return ret ;
-       }
-       
-       
-
-       private HadoopConfigHolder(String aDatasourceName) {
-               datasourceName = aDatasourceName;
-               if ( ! initialized ) {
-                       init() ;
-               }
-               initLoginInfo();
-       }
-       
-       private HadoopConfigHolder(String aDatasourceName, HashMap<String,String> connectionProperties) {
-               datasourceName = aDatasourceName;
-               this.connectionProperties = connectionProperties ;
-               initConnectionProp() ;
-               initLoginInfo();
-       }
-       
-       private void initConnectionProp() {
-               for(String key : connectionProperties.keySet()) {
-                       
-                       String resourceName = getResourceName(key) ;
-                       
-                       if (resourceName == null) {
-                               resourceName = RANGER_SECTION_NAME ;
-                       }
-                       String val = connectionProperties.get(key) ;
-                       addConfiguration(datasourceName, resourceName, key, val );
-               }
-       }
-       
-       private String getResourceName(String key) {
-               
-               if (resourcemapProperties == null) {
-                       initResourceMap();
-               }
-               
-               if (resourcemapProperties != null) {
-                       return resourcemapProperties.getProperty(key);
-               }
-               else {
-                       return null;
-               }
-       }
-
-       public static void initResourceMap() {
-               if (resourcemapProperties == null) {
-                       resourcemapProperties = new Properties() ;
-                       InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE) ;
-                       if (in != null) {
-                               try {
-                                       resourcemapProperties.load(in);
-                               } catch (IOException e) {
-                                       throw new HadoopException("Unable to load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
-                               }
-                       }
-                       else {
-                               throw new HadoopException("Unable to locate resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class path.");
-                       }
-                       }
-               }
-       }
-
-       
-       
-       private static synchronized void init() {
-
-               if (initialized) {
-                       return ;
-               }
-
-               try {
-                       InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE) ;
-                       if (in != null) {
-                               Properties prop = new Properties() ;
-                               try {
-                                       prop.load(in) ;
-                               } catch (IOException e) {
-                                       throw new HadoopException("Unable to get configuration information for Hadoop environments", e);
-                               }
-                               finally {
-                                       try {
-                                               in.close();
-                                       } catch (IOException e) {
-                                               // Ignored exception when the stream is closed.
-                                       } 
-                               }
-       
-                               if (prop.size() == 0) 
-                                       return ;
-                               
-                               for(Object keyobj : prop.keySet()) {
-                                       String key = (String)keyobj;
-                                       String val = prop.getProperty(key) ;
-                                       
-                                       int dotLocatedAt = key.indexOf(".") ;
-                                       
-                                       if (dotLocatedAt == -1) {
-                                               continue ;
-                                       }
-                                       
-                                       String dataSource = key.substring(0,dotLocatedAt) ;
-                                       
-                                       String propKey = key.substring(dotLocatedAt+1) ;
-                                       int resourceFoundAt =  propKey.indexOf(".") ;
-                                       if (resourceFoundAt > -1) {
-                                               String resourceName = propKey.substring(0, resourceFoundAt) + ".xml" ; 
-                                               propKey = propKey.substring(resourceFoundAt+1) ;
-                                               addConfiguration(dataSource, resourceName, propKey, val) ;
-                                       }
-                                       
-                               }
-                       }
-                       
-                       in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE) ;
-                       if (in != null) {
-                               Properties tempLoginProp = new Properties() ;
-                               try {
-                                       tempLoginProp.load(in) ;
-                               } catch (IOException e) {
-                                       throw new HadoopException("Unable to get login configuration information for Hadoop environments from file: [" + GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e);
-                               }
-                               finally {
-                                       try {
-                                               in.close();
-                                       } catch (IOException e) {
-                                               // Ignored exception when the stream is closed.
-                                       } 
-                               }
-                               globalLoginProp = tempLoginProp ;
-                       }
-               }
-               finally {
-                       initialized = true ;
-               }
-       }
-       
-       
-       private void initLoginInfo() {
-               Properties prop = this.getRangerSection() ;
-               if (prop != null) {
-                       userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP) ;
-                       keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP) ;
-                       password = prop.getProperty(RANGER_LOGIN_PASSWORD) ;
-               
-                       if ( getHadoopSecurityAuthentication() != null) {
-                               isKerberosAuth = ( getHadoopSecurityAuthentication().equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
-                       }
-                       else {
-                               isKerberosAuth = (userName != null) && (userName.indexOf("@") > -1) ;
-                       }
-                       }
-                                       
-               }
-       }
-
-       public Properties getRangerSection() {
-               Properties prop = this.getProperties(RANGER_SECTION_NAME) ;
-               if (prop == null) {
-                       prop = globalLoginProp ;
-               }
-               return prop ;
-       }
-
-
-
-       private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) {
-
-               if (dataSource == null || dataSource.isEmpty()) {
-                       return ;
-               }
-               
-               if (propertyName == null || propertyName.isEmpty()) {
-                       return ;
-               }
-               
-               if (resourceName == null) {
-                       resourceName = DEFAULT_RESOURCE_NAME ;
-               }
-               
-               
-               HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(dataSource) ;
-               
-               if (resourceName2PropertiesMap == null) {
-                       resourceName2PropertiesMap = new HashMap<String,Properties>() ;
-                       dataSource2ResourceListMap.put(dataSource, resourceName2PropertiesMap) ;
-               }
-               
-               Properties prop = resourceName2PropertiesMap.get(resourceName) ;
-               if (prop == null) {
-                       prop = new Properties() ;
-                       resourceName2PropertiesMap.put(resourceName, prop) ;
-               }
-               if (value == null) {
-                       prop.remove(propertyName) ;
-               }
-               else {
-                       prop.put(propertyName, value) ;
-               }
-       }
-       
-       
-       public String getDatasourceName() {
-               return datasourceName ;
-       }
-       
-       public boolean hasResourceExists(String aResourceName) {
-               HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
-               return (resourceName2PropertiesMap != null && resourceName2PropertiesMap.containsKey(aResourceName)) ;
-       }
-
-       public Properties getProperties(String aResourceName) {
-               Properties ret = null ;
-               HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(datasourceName) ;
-               if (resourceName2PropertiesMap != null) {
-                       ret =  resourceName2PropertiesMap.get(aResourceName) ;
-               }
-               return ret ;
-       }
-       
-       public String getHadoopSecurityAuthentication() {
-               Properties repoParam = null ;
-               String ret = null;
-               
-               HashMap<String,Properties> resourceName2PropertiesMap  = dataSource2ResourceListMap.get(this.getDatasourceName()) ;
-               
-               if ( resourceName2PropertiesMap != null) {
-                       repoParam=resourceName2PropertiesMap.get(DEFAULT_RESOURCE_NAME);
-               }
-               
-               if ( repoParam != null ) {
-                       ret = (String)repoParam.get(HADOOP_SECURITY_AUTHENTICATION);
-               }
-               return ret;
-       }
-       
-       public String getUserName() {
-               return userName;
-       }
-
-       public String getKeyTabFile() {
-               return keyTabFile;
-       }
-
-       public String getPassword() {
-               return password;
-       }
-
-       public boolean isKerberosAuthentication() {
-               return isKerberosAuth;
-       }
-
-}
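
Note: init() above parses each datasource.properties key as
<datasource>.<resource>.<property>, appending ".xml" to the resource
segment, and initLoginInfo() reads the xalogin section. An illustrative
fragment (all names and hosts are placeholders):

    hadoopdev.core-site.fs.default.name=hdfs://namenode.example.com:8020
    hadoopdev.core-site.hadoop.security.authentication=kerberos
    hadoopdev.xalogin.username=rangerlookup@EXAMPLE.COM
    hadoopdev.xalogin.keytabfile=/etc/security/keytabs/rangerlookup.keytab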

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java b/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
deleted file mode 100644
index 5614343..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hadoop/client/exceptions/HadoopException.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hadoop.client.exceptions;
-
-import java.util.HashMap;
-
-public class HadoopException extends RuntimeException {
-
-       private static final long serialVersionUID = 8872734935128535649L;
-       
-       public HashMap<String, Object> responseData;
-
-       public HadoopException() {
-               super();
-               // TODO Auto-generated constructor stub
-       }
-
-       public HadoopException(String message, Throwable cause) {
-               super(message, cause);
-               // TODO Auto-generated constructor stub
-       }
-
-       public HadoopException(String message) {
-               super(message);
-               // TODO Auto-generated constructor stub
-       }
-
-       public HadoopException(Throwable cause) {
-               super(cause);
-               // TODO Auto-generated constructor stub
-       }
-
-       public void generateResponseDataMap(boolean connectivityStatus,
-                       String message, String description, Long objectId, String fieldName) {
-               responseData = new HashMap<String, Object>();
-               responseData.put("connectivityStatus", connectivityStatus);
-               responseData.put("message", message);
-               responseData.put("description", description);
-               responseData.put("objectId", objectId);
-               responseData.put("fieldName", fieldName);
-       }
-
-}

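Every lookup client in this module used the class above the same way: wrap the root cause in a HadoopException, populate responseData, and rethrow so the admin portal could render the map. A short sketch of that convention (the message text and root cause are illustrative; it assumes the HadoopException class shown above is on the classpath):

    import java.io.IOException;

    // Sketch of the wrap-and-rethrow convention built on the HadoopException above.
    public class HadoopExceptionDemo {
        public static void main(String[] args) {
            try {
                throw new IOException("connection refused"); // stand-in for a real failure
            } catch (IOException io) {
                HadoopException hdpException = new HadoopException("Unable to check availability of the environment.", io);
                // false => connectivity failed; objectId and fieldName were usually null
                hdpException.generateResponseDataMap(false, io.getMessage(),
                        "Unable to check availability of the environment.", null, null);
                System.out.println(hdpException.responseData); // {connectivityStatus=false, ...}
                throw hdpException;
            }
        }
    }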
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
deleted file mode 100644
index 1df5a0b..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClient.java
+++ /dev/null
@@ -1,403 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hbase.client;
-
-import java.io.IOException;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map.Entry;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.MasterNotRunningException;
-import org.apache.hadoop.hbase.ZooKeeperConnectionException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
-import org.apache.ranger.hadoop.client.config.BaseClient;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-import com.google.protobuf.ServiceException;
-
-public class HBaseClient extends BaseClient {
-
-       private static final Log LOG = LogFactory.getLog(HBaseClient.class) ;
-
-       private Subject subj = null ;
-
-       public HBaseClient(String dataSource) {
-               super(dataSource) ;             
-       }
-
-       public HBaseClient(String dataSource,HashMap<String,String> connectionProp) {
-               super(dataSource, addDefaultHBaseProp(connectionProp)) ;
-       }
-
-       //TODO: temporary solution - to be added to the UI for HBase
-       private static HashMap<String,String> addDefaultHBaseProp(HashMap<String,String> connectionProp) {
-               if (connectionProp != null) {
-                       String param = "zookeeper.znode.parent" ;
-                       String unsecuredPath = "/hbase-unsecure" ;
-                       String authParam = "hadoop.security.authorization" ;
-
-                       String ret = connectionProp.get(param) ;
-                       LOG.info("HBase connection has [" + param + "] with value [" + ret + "]");
-                       if (ret == null) {
-                               ret = connectionProp.get(authParam) ;
-                               LOG.info("HBase connection has [" + authParam + "] with value [" + ret + "]");
-                               if (ret != null && ret.trim().equalsIgnoreCase("false")) {
-                                       LOG.info("HBase connection is resetting [" + param + "] with value [" + unsecuredPath + "]");
-                                       connectionProp.put(param, unsecuredPath) ;
-                               }
-                       }
-               }
-               return connectionProp;
-       }
-
-       public static HashMap<String, Object> testConnection(String dataSource,
-                       HashMap<String, String> connectionProperties) {
-
-               HashMap<String, Object> responseData = new HashMap<String, Object>();
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               boolean connectivityStatus = false;
-
-               HBaseClient connectionObj = new HBaseClient(dataSource,
-                               connectionProperties);
-               if (connectionObj != null) {
-                       connectivityStatus = connectionObj.getHBaseStatus();
-               }
-
-               if (connectivityStatus) {
-                       String successMsg = "TestConnection Successful";
-                       generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-                                       null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any databases using given parameters.";
-                       generateResponseDataMap(connectivityStatus, failureMsg, failureMsg
-                                       + errMsg, null, null, responseData);
-               }
-               return responseData;
-       }
-
-       public boolean getHBaseStatus() {
-               boolean hbaseStatus = false;
-               subj = getLoginSubject();
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               if (subj != null) {
-                       ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-                       try {
-                               Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-
-                               hbaseStatus = Subject.doAs(subj, new PrivilegedAction<Boolean>() {
-                                       @Override
-                                       public Boolean run() {
-                                               Boolean hbaseStatus1 = false;
-                                               try {
-                                                   LOG.info("getHBaseStatus: creating default Hbase configuration");
-                                                       Configuration conf = HBaseConfiguration.create() ;
-                                                       LOG.info("getHBaseStatus: setting config values from client");
-                                                       setClientConfigValues(conf);
-                                                   LOG.info("getHBaseStatus: checking HbaseAvailability with the new config");
-                                                       HBaseAdmin.checkHBaseAvailable(conf);
-                                                   LOG.info("getHBaseStatus: no exception: HbaseAvailability true");
-                                                       hbaseStatus1 = true;
-                                               } catch (ZooKeeperConnectionException zce) {
-                                                       String msgDesc = "getHBaseStatus: Unable to connect to `ZooKeeper` "
-                                                                       + "using given config parameters.";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, zce);
-                                                       hdpException.generateResponseDataMap(false, getMessage(zce),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-
-                                               } catch (MasterNotRunningException mnre) {
-                                                       String msgDesc = "getHBaseStatus: Looks like `Master` is not running, "
-                                                                       + "so couldn't check that running HBase is available or not, "
-                                                                       + "Please try again later.";
-                                                       HadoopException hdpException = new HadoopException(
-                                                                       msgDesc, mnre);
-                                                       hdpException.generateResponseDataMap(false,
-                                                                       getMessage(mnre), msgDesc + errMsg,
-                                                                       null, null);
-                                                       throw hdpException;
-
-                                               } catch (ServiceException se) {
-                                                       String msgDesc = "getHBaseStatus: Unable to check availability of "
-                                                                       + "Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, se);
-                                                       hdpException.generateResponseDataMap(false, getMessage(se),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-
-                                               } catch(IOException io) {
-                                                       String msgDesc = "getHBaseStatus: Unable to check availability of"
-                                                                       + " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, io);
-                                                       hdpException.generateResponseDataMap(false, getMessage(io),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-
-                                               }  catch (Throwable e) {
-                                                       String msgDesc = "getHBaseStatus: Unable to check availability of"
-                                                                       + " Hbase environment [" + getConfigHolder().getDatasourceName() + "].";
-                                                       LOG.error(msgDesc);
-                                                       hbaseStatus1 = false;
-                                                       HadoopException hdpException = new HadoopException(msgDesc, e);
-                                                       hdpException.generateResponseDataMap(false, getMessage(e),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-                                               }
-                                               return hbaseStatus1;
-                                       }
-                               }) ;
-                       } catch (SecurityException se) {
-                               String msgDesc = "getHBaseStatus: Unable to connect to HBase Server instance, "
-                                               + "current thread might not be able set the context ClassLoader.";
-                               HadoopException hdpException = new HadoopException(msgDesc, se);
-                               hdpException.generateResponseDataMap(false, getMessage(se),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } finally {
-                               Thread.currentThread().setContextClassLoader(prevCl);
-                       }
-               } else {
-                       LOG.error("getHBaseStatus: secure login not done, subject is null");
-               }
-
-               return hbaseStatus;
-       }
-
-       private void setClientConfigValues(Configuration conf) {
-               if (this.connectionProperties == null) return;
-               Iterator<Entry<String, String>> i =  this.connectionProperties.entrySet().iterator();
-               while (i.hasNext()) {
-                       Entry<String, String> e = i.next();
-                       String v = conf.get(e.getKey());
-                       if (v != null && !v.equalsIgnoreCase(e.getValue())) {
-                               conf.set(e.getKey(), e.getValue());
-                       }
-               }
-       }
-
-       public List<String> getTableList(final String tableNameMatching) {
-               List<String> ret = null ;
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-
-               subj = getLoginSubject();
-
-               if (subj != null) {
-                       ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-                       try {
-                               Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-
-                               ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
-
-                                       @Override
-                                       public List<String> run() {
-
-                                               List<String> tableList = new ArrayList<String>() ;
-                                               HBaseAdmin admin = null ;
-                                               try {
-
-                                                       Configuration conf = HBaseConfiguration.create() ;
-                                                       admin = new HBaseAdmin(conf) ;
-                                                       for (HTableDescriptor htd : admin.listTables(tableNameMatching)) {
-                                                               tableList.add(htd.getNameAsString()) ;
-                                                       }
-                                               } catch (ZooKeeperConnectionException zce) {
-                                                       String msgDesc = "getTableList: Unable to connect to `ZooKeeper` "
-                                                                       + "using given config parameters.";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, zce);
-                                                       hdpException.generateResponseDataMap(false, getMessage(zce),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-
-                                               } catch (MasterNotRunningException mnre) {
-                                                       String msgDesc = "getTableList: Looks like `Master` is not running, "
-                                                                       + "so couldn't check that running HBase is available or not, "
-                                                                       + "Please try again later.";
-                                                       HadoopException hdpException = new HadoopException(
-                                                                       msgDesc, mnre);
-                                                       hdpException.generateResponseDataMap(false,
-                                                                       getMessage(mnre), msgDesc + errMsg,
-                                                                       null, null);
-                                                       throw hdpException;
-
-                                               }  catch(IOException io) {
-                                                       String msgDesc = "Unable to get HBase table List for [repository:"
-                                                                       + getConfigHolder().getDatasourceName() + ",table-match:"
-                                                                       + tableNameMatching + "].";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, io);
-                                                       hdpException.generateResponseDataMap(false, getMessage(io),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-                                               }   catch (Throwable e) {
-                                                       String msgDesc = "Unable to get HBase table List for [repository:"
-                                                                       + getConfigHolder().getDatasourceName() + ",table-match:"
-                                                                       + tableNameMatching + "].";
-                                                       LOG.error(msgDesc);
-                                                       HadoopException hdpException = new HadoopException(msgDesc, e);
-                                                       hdpException.generateResponseDataMap(false, getMessage(e),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-                                               }
-                                               finally {
-                                                       if (admin != null) {
-                                                               try {
-                                                                       admin.close() ;
-                                                               } catch (IOException e) {
-                                                                       LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
-                                                               }
-                                                       }
-                                               }
-                                               return tableList ;
-                                       }
-
-                               }) ;
-                       }
-                       finally {
-                               Thread.currentThread().setContextClassLoader(prevCl);
-                       }
-               }
-               return ret ;
-       }
-
-
-       public List<String> getColumnFamilyList(final String tableName, final String columnFamilyMatching) {
-               List<String> ret = null ;
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-
-               subj = getLoginSubject();
-               if (subj != null) {
-                       ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
-                       try {
-                               Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
-
-                               ret = Subject.doAs(subj, new PrivilegedAction<List<String>>() {
-
-                                       @Override
-                                       public List<String> run() {
-
-                                               List<String> colfList = new ArrayList<String>() ;
-                                               HBaseAdmin admin = null ;
-                                               try {
-                                                       Configuration conf = HBaseConfiguration.create();
-                                                       admin = new HBaseAdmin(conf) ;
-                                                       HTableDescriptor htd = admin.getTableDescriptor(tableName.getBytes()) ;
-                                                       if (htd != null) {
-                                                               for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
-                                                                       String colf = hcd.getNameAsString() ;
-                                                                       if (colf.matches(columnFamilyMatching)) {
-                                                                               if (!colfList.contains(colf)) {
-                                                                                       colfList.add(colf) ;
-                                                                               }
-                                                                       }
-                                                               }
-                                                       }
-                                               }  catch (ZooKeeperConnectionException zce) {
-                                                       String msgDesc = "getColumnFamilyList: Unable to connect to `ZooKeeper` "
-                                                                       + "using given config parameters.";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, zce);
-                                                       hdpException.generateResponseDataMap(false, getMessage(zce),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-
-                                               } catch (MasterNotRunningException mnre) {
-                                                       String msgDesc = "getColumnFamilyList: Looks like `Master` is not running, "
-                                                                       + "so couldn't check that running HBase is available or not, "
-                                                                       + "Please try again later.";
-                                                       HadoopException hdpException = new HadoopException(
-                                                                       msgDesc, mnre);
-                                                       hdpException.generateResponseDataMap(false,
-                                                                       getMessage(mnre), msgDesc + errMsg,
-                                                                       null, null);
-                                                       throw hdpException;
-
-                                               }  catch(IOException io) {
-                                                       String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-                                                                       + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-                                                                       + ", table-match:" + columnFamilyMatching + "], "
-                                                                       + "current thread might not be able set the context ClassLoader.";
-                                                       HadoopException hdpException = new HadoopException(msgDesc, io);
-                                                       hdpException.generateResponseDataMap(false, getMessage(io),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-                                               } catch (SecurityException se) {
-                                                               String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-                                                                               + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-                                                                               + ", table-match:" + columnFamilyMatching + "], "
-                                                                               + "current thread might not be able set the context ClassLoader.";
-                                                               HadoopException hdpException = new HadoopException(msgDesc, se);
-                                                               hdpException.generateResponseDataMap(false, getMessage(se),
-                                                                               msgDesc + errMsg, null, null);
-                                                               throw hdpException;
-
-                                               }  catch (Throwable e) {
-                                                       String msgDesc = "getColumnFamilyList: Unable to get HBase ColumnFamilyList for "
-                                                                       + "[repository:" +getConfigHolder().getDatasourceName() + ",table:" + tableName
-                                                                       + ", table-match:" + columnFamilyMatching + "], "
-                                                                       + "current thread might not be able set the context ClassLoader.";
-                                                       LOG.error(msgDesc);
-                                                       HadoopException hdpException = new HadoopException(msgDesc, e);
-                                                       hdpException.generateResponseDataMap(false, getMessage(e),
-                                                                       msgDesc + errMsg, null, null);
-                                                       throw hdpException;
-                                               }
-                                               finally {
-                                                       if (admin != null) {
-                                                               try {
-                                                                       admin.close() ;
-                                                               } catch (IOException e) {
-                                                                       LOG.error("Unable to close HBase connection [" + getConfigHolder().getDatasourceName() + "]", e);
-                                                               }
-                                                       }
-                                               }
-                                               return colfList ;
-                                       }
-
-                               }) ;
-                       } catch (SecurityException se) {
-                               String msgDesc = "getColumnFamilyList: Unable to connect to HBase Server instance, "
-                                               + "current thread might not be able set the context ClassLoader.";
-                               HadoopException hdpException = new HadoopException(msgDesc, se);
-                               hdpException.generateResponseDataMap(false, getMessage(se),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } finally {
-                               Thread.currentThread().setContextClassLoader(prevCl);
-                       }
-               }
-               return ret ;
-       }
-}

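The static testConnection() above is what the admin portal's Test Connection flow relied on; on most failures getHBaseStatus() throws a HadoopException whose responseData carries the same keys, so callers see either a success map or that exception. A hypothetical invocation (the repository name and connection-property values below are placeholders):

    import java.util.HashMap;

    // Hypothetical caller of the removed HBaseClient.testConnection();
    // assumes org.apache.ranger.hbase.client.HBaseClient is on the classpath.
    public class HBaseLookupCheck {
        public static void main(String[] args) {
            HashMap<String, String> props = new HashMap<String, String>();
            props.put("hbase.zookeeper.quorum", "localhost");     // standard HBase client setting
            props.put("hadoop.security.authorization", "false");  // triggers the /hbase-unsecure default above

            HashMap<String, Object> response = HBaseClient.testConnection("hbasedev", props);
            System.out.println(response.get("connectivityStatus") + ": " + response.get("message"));
        }
    }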
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java b/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
deleted file mode 100644
index 617d2e1..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hbase/client/HBaseClientTester.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hbase.client;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-public class HBaseClientTester {
-
-       private static final Log LOG = LogFactory.getLog(HBaseClientTester.class) ;
-
-       public static void main(String[] args) throws Throwable {
-
-               HBaseClient hc = null;
-
-               if (args.length <= 2) {
-                       System.err.println("USAGE: java " + HBaseClientTester.class.getName() + " dataSourceName propertyFile <tableName> <columnFamilyName>");
-                       System.exit(1);
-               }
-
-               LOG.info("Starting ...");
-
-               Properties conf = new Properties();
-               InputStream in = HBaseClientTester.class.getClassLoader().getResourceAsStream(args[1]) ;
-               try {
-               conf.load(in);
-               }
-               finally {
-                       if (in != null) {
-                               try {
-                               in.close();
-                               }
-                               catch(IOException ioe) {
-                                       // Ignore IOE when closing stream
-                               }
-                       }
-               }
-
-               HashMap<String, String> prop = new HashMap<String, String>();
-               for (Object key : conf.keySet()) {
-                       Object val = conf.get(key);
-                       prop.put((String) key, (String) val);
-               }
-
-               hc = new HBaseClient(args[0], prop);
-
-               if (args.length == 3) {
-                       List<String> dbList = hc.getTableList(args[2]);
-                       if (dbList.size() == 0) {
-                               System.out.println("No tables found with db filter [" + args[2] + "]");
-                       } else {
-                               for (String str : dbList) {
-                                       System.out.println("table: " + str);
-                               }
-                       }
-               } else if (args.length == 4) {
-                       List<String> tableList = hc.getColumnFamilyList(args[2], args[3]);
-                       if (tableList.size() == 0) {
-                               System.out.println("No column families found under table [" + args[2] + "] with columnfamily filter [" + args[3] + "]");
-                       } else {
-                               for (String str : tableList) {
-                                       System.out.println("ColumnFamily: " + str);
-                               }
-                       }
-               }
-
-       }
-
-}

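Per the USAGE string above, the tester resolved its property file from the classpath and took an optional table filter and column-family filter; a typical invocation (repository name, property file, and filter patterns are placeholders) looked like:

    java org.apache.ranger.hbase.client.HBaseClientTester hbasedev hbase-lookup.properties 'tbl.*' 'cf.*'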
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
deleted file mode 100644
index f8f50f8..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClient.java
+++ /dev/null
@@ -1,510 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
- package org.apache.ranger.hive.client;
-
-import java.io.Closeable;
-import java.security.PrivilegedAction;
-import java.sql.Connection;
-import java.sql.Driver;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.SQLTimeoutException;
-import java.sql.Statement;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-import javax.security.auth.Subject;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.hadoop.client.config.BaseClient;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-
-public class HiveClient extends BaseClient implements Closeable {
-
-       private static final Log LOG = LogFactory.getLog(HiveClient.class) ;
-       
-       Connection con = null ;
-       boolean isKerberosAuth=false;
-       
-
-       public HiveClient(String dataSource) {
-               super(dataSource) ;
-               initHive() ;
-       }
-       
-       public HiveClient(String dataSource,HashMap<String,String> 
connectionProp) {
-               super(dataSource,connectionProp) ;
-               initHive() ;
-       }
-       
-       public void initHive() {
-               isKerberosAuth = getConfigHolder().isKerberosAuthentication();
-               if (isKerberosAuth) {
-                       LOG.info("Secured Mode: JDBC Connection done with 
preAuthenticated Subject");
-                       Subject.doAs(getLoginSubject(), new 
PrivilegedAction<Object>() {
-                               public Object run() {
-                                       initConnection();
-                                       return null;
-                               }
-                       }) ;                            
-               }
-               else {
-                       LOG.info("Since Password is NOT provided, Trying to use 
UnSecure client with username and password");
-                       final String userName = getConfigHolder().getUserName() 
;
-                       final String password = getConfigHolder().getPassword() 
;
-                       Subject.doAs(getLoginSubject(), new 
PrivilegedAction<Object>() {
-                               public Object run() {
-                                       initConnection(userName,password);
-                                       return null;
-                               }
-                       }) ;    
-               }
-       }
-       
-       public List<String> getDatabaseList(String databaseMatching){
-               final String dbMatching=databaseMatching;
-               List<String> dblist = Subject.doAs(getLoginSubject(), new 
PrivilegedAction<List<String>>() {
-                       public List<String>  run() {
-                               return getDBList(dbMatching);
-                       }
-               }) ;
-               return dblist;
-       }
-               
-       private List<String> getDBList(String databaseMatching) {
-               List<String> ret = new ArrayList<String>() ;
-               String errMsg = " You can still save the repository and start 
creating "
-                               + "policies, but you would not be able to use 
autocomplete for "
-                               + "resource names. Check xa_portal.log for more 
info.";
-               if (con != null) {
-                       Statement stat =  null ;
-                       ResultSet rs = null ;
-                       String sql = "show databases" ;
-                       if (databaseMatching != null && ! 
databaseMatching.isEmpty()) {
-                               sql = sql + " like \"" + databaseMatching  + 
"\"" ;
-                       }
-                       try {
-                               stat =  con.createStatement()  ;
-                               rs = stat.executeQuery(sql) ;
-                               while (rs.next()) {
-                                       ret.add(rs.getString(1)) ;
-                               }
-                       } catch (SQLTimeoutException sqlt) {
-                               String msgDesc = "Time Out, Unable to execute 
SQL [" + sql
-                                               + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqlt);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqlt),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (SQLException sqle) {
-                               String msgDesc = "Unable to execute SQL [" + 
sql + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqle);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqle),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } finally {
-                               close(rs) ;
-                               close(stat) ;
-                       }
-                       
-               }
-               return ret ;
-       }
-       
-       public List<String> getTableList(String database, String 
tableNameMatching){
-               final String db=database;
-               final String tblNameMatching=tableNameMatching;
-               List<String> tableList = Subject.doAs(getLoginSubject(), new 
PrivilegedAction<List<String>>() {
-                       public List<String>  run() {
-                               return getTblList(db,tblNameMatching);
-                       }
-               }) ;
-               return tableList;
-       }
-
-       public List<String> getTblList(String database, String 
tableNameMatching) {
-               List<String> ret = new ArrayList<String>() ;
-               String errMsg = " You can still save the repository and start 
creating "
-                               + "policies, but you would not be able to use 
autocomplete for "
-                               + "resource names. Check xa_portal.log for more 
info.";
-               if (con != null) {
-                       Statement stat =  null ;
-                       ResultSet rs = null ;
-                       
-                       String sql = null ;
-                       
-                       try {
-                               sql = "use " + database;
-                               
-                               try {
-                                       stat = con.createStatement() ;
-                                       stat.execute(sql) ;
-                               }
-                               finally {
-                                       close(stat) ;
-                               }
-                               
-                               sql = "show tables " ;
-                               if (tableNameMatching != null && ! 
tableNameMatching.isEmpty()) {
-                                       sql = sql + " like \"" + 
tableNameMatching  + "\"" ;
-                               }
-                               stat =  con.createStatement()  ;
-                               rs = stat.executeQuery(sql) ;
-                               while (rs.next()) {
-                                       ret.add(rs.getString(1)) ;
-                               }
-                       } catch (SQLTimeoutException sqlt) {
-                               String msgDesc = "Time Out, Unable to execute 
SQL [" + sql
-                                               + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqlt);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqlt),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (SQLException sqle) {
-                               String msgDesc = "Unable to execute SQL [" + 
sql + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqle);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqle),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } finally {
-                               close(rs) ;
-                               close(stat) ;
-                       }
-                       
-               }
-               return ret ;
-       }
-
-       public List<String> getViewList(String database, String 
viewNameMatching) {
-               List<String> ret = null ;
-               return ret ;
-       }
-
-       public List<String> getUDFList(String database, String udfMatching) {
-               List<String> ret = null ;
-               return ret ;
-       }
-       
-       public List<String> getColumnList(String database, String tableName, 
String columnNameMatching) {
-               final String db=database;
-               final String tblName=tableName;
-               final String clmNameMatching=columnNameMatching;
-               List<String> columnList = Subject.doAs(getLoginSubject(), new 
PrivilegedAction<List<String>>() {
-                       public List<String>  run() {
-                                       return 
getClmList(db,tblName,clmNameMatching);
-                               }
-                       }) ;
-               return columnList;
-       }
-       
-       public List<String> getClmList(String database, String tableName, 
String columnNameMatching) {
-               List<String> ret = new ArrayList<String>() ;
-               String errMsg = " You can still save the repository and start 
creating "
-                               + "policies, but you would not be able to use 
autocomplete for "
-                               + "resource names. Check xa_portal.log for more 
info.";
-               if (con != null) {
-                       
-                       String columnNameMatchingRegEx = null ;
-                       
-                       if (columnNameMatching != null && ! 
columnNameMatching.isEmpty()) {
-                               columnNameMatchingRegEx = columnNameMatching ;
-                       }
-                       
-                       Statement stat =  null ;
-                       ResultSet rs = null ;
-                       
-                       String sql = null ;
-                       
-                       try {
-                               sql = "use " + database;
-                               
-                               try {
-                                       stat = con.createStatement() ;
-                                       stat.execute(sql) ;
-                               }
-                               finally {
-                                       close(stat) ;
-                               }
-                               
-                               sql = "describe  " + tableName ;
-                               stat =  con.createStatement()  ;
-                               rs = stat.executeQuery(sql) ;
-                               while (rs.next()) {
-                                       String columnName = rs.getString(1) ;
-                                       if (columnNameMatchingRegEx == null) {
-                                               ret.add(columnName) ;
-                                       }
-                                       else if 
(FilenameUtils.wildcardMatch(columnName,columnNameMatchingRegEx)) {
-                                               ret.add(columnName) ;
-                                       }
-                               }
-                       } catch (SQLTimeoutException sqlt) {
-                               String msgDesc = "Time Out, Unable to execute 
SQL [" + sql
-                                               + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqlt);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqlt),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (SQLException sqle) {
-                               String msgDesc = "Unable to execute SQL [" + 
sql + "].";
-                               HadoopException hdpException = new 
HadoopException(msgDesc,
-                                               sqle);
-                               hdpException.generateResponseDataMap(false, 
getMessage(sqle),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } finally {
-                               close(rs) ;
-                               close(stat) ;
-                       }
-                       
-               }
-               return ret ;
-       }
-       
-       
-       public void close() {
-               Subject.doAs(getLoginSubject(), new PrivilegedAction<Void>(){
-                       public Void run() {
-                               close(con) ;
-                               return null;
-                       }
-               });
-       }
-       
-       private void close(Statement aStat) {
-               try {
-                       if (aStat != null) {
-                               aStat.close();
-                       }
-               } catch (SQLException e) {
-                       LOG.error("Unable to close SQL statement", e);
-               }
-       }
-
-       private void close(ResultSet aResultSet) {
-               try {
-                       if (aResultSet != null) {
-                               aResultSet.close();
-                       }
-               } catch (SQLException e) {
-                       LOG.error("Unable to close ResultSet", e);
-               }
-       }
-
-       private void close(Connection aCon) {
-               try {
-                       if (aCon != null) {
-                               aCon.close();
-                       }
-               } catch (SQLException e) {
-                       LOG.error("Unable to close SQL Connection", e);
-               }
-       }
-
-       private void initConnection() {
-               initConnection(null,null) ;
-       }
-
-       
-       private void initConnection(String userName, String password) {
-       
-               Properties prop = getConfigHolder().getRangerSection() ;
-               String driverClassName = 
prop.getProperty("jdbc.driverClassName") ;
-               String url =  prop.getProperty("jdbc.url") ;    
-               String errMsg = " You can still save the repository and start 
creating "
-                               + "policies, but you would not be able to use 
autocomplete for "
-                               + "resource names. Check xa_portal.log for more 
info.";
-       
-               if (driverClassName != null) {
-                       try {
-                               Driver driver = (Driver)Class.forName(driverClassName).newInstance() ;
-                               DriverManager.registerDriver(driver);
-                       } catch (SQLException e) {
-                               String msgDesc = "initConnection: Caught SQLException while registering "
-                                               + "Hive driver, so unable to connect to Hive Thrift Server instance.";
-                               HadoopException hdpException = new HadoopException(msgDesc, e);
-                               hdpException.generateResponseDataMap(false, getMessage(e),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (IllegalAccessException ilae) {
-                               String msgDesc = "initConnection: Class or its nullary constructor might not be accessible, "
-                                               + "so unable to initiate connection to Hive Thrift Server instance.";
-                               HadoopException hdpException = new HadoopException(msgDesc, ilae);
-                               hdpException.generateResponseDataMap(false, getMessage(ilae),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (InstantiationException ie) {
-                               String msgDesc = "initConnection: Class may not have a nullary constructor, or "
-                                               + "the instantiation failed for some other reason, "
-                                               + "so unable to initiate connection to Hive Thrift Server instance.";
-                               HadoopException hdpException = new HadoopException(msgDesc, ie);
-                               hdpException.generateResponseDataMap(false, getMessage(ie),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-
-                       } catch (ExceptionInInitializerError eie) {
-                               String msgDesc = "initConnection: Got ExceptionInInitializerError; "
-                                               + "the initialization provoked by this method failed, "
-                                               + "so unable to initiate connection to Hive Thrift Server instance.";
-                               HadoopException hdpException = new HadoopException(msgDesc, eie);
-                               hdpException.generateResponseDataMap(false, getMessage(eie),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (SecurityException se) {
-                               String msgDesc = "initConnection: Unable to initiate connection to Hive Thrift Server instance; "
-                                               + "the caller's class loader is not the same as or an ancestor "
-                                               + "of the class loader for the current class, and invocation of "
-                                               + "s.checkPackageAccess() denies access to the package of this class.";
-                               HadoopException hdpException = new HadoopException(msgDesc, se);
-                               hdpException.generateResponseDataMap(false, getMessage(se),
-                                               msgDesc + errMsg, null, null);
-                               throw hdpException;
-                       } catch (Throwable t) {
-                               String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance; "
-                                               + "please provide a valid value for the field {jdbc.driverClassName}.";
-                               HadoopException hdpException = new HadoopException(msgDesc, t);
-                               hdpException.generateResponseDataMap(false, getMessage(t),
-                                               msgDesc + errMsg, null, "jdbc.driverClassName");
-                               throw hdpException;
-                       }
-               }
-               
-               try {
-                       
-                       if (userName == null && password == null) {
-                               con = DriverManager.getConnection(url) ;
-                       }
-                       else {                  
-                               con = DriverManager.getConnection(url, userName, password) ;
-                       }
-
-               } catch (SQLException e) {
-                       String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-                       HadoopException hdpException = new HadoopException(msgDesc, e);
-                       hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
-                                       + errMsg, null, null);
-                       throw hdpException;
-               } catch (SecurityException se) {
-                       String msgDesc = "Unable to connect to Hive Thrift Server instance.";
-                       HadoopException hdpException = new HadoopException(msgDesc, se);
-                       hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
-                                       + errMsg, null, null);
-                       throw hdpException;
-               }
-       }
-
-       
-       public static void main(String[] args) {
-               
-               HiveClient hc = null ;
-               
-               if (args.length == 0) {
-                       System.err.println("USAGE: java " + 
HiveClient.class.getName() + " dataSourceName <databaseName> <tableName> 
<columnName>") ;
-                       System.exit(1) ;
-               }
-               
-               try {
-                       hc = new HiveClient(args[0]) ;
-                       
-                       if (args.length == 2) {
-                               List<String> dbList = 
hc.getDatabaseList(args[1]) ;
-                               if (dbList.size() == 0) {
-                                       System.out.println("No database found 
with db filter [" + args[1] + "]") ;
-                               }
-                               else {
-                                       for (String str : dbList ) {
-                                               System.out.println("database: " 
+ str ) ;
-                                       }
-                               }
-                       }
-                       else if (args.length == 3) {
-                               List<String> tableList = 
hc.getTableList(args[1], args[2]) ;
-                               if (tableList.size() == 0) {
-                                       System.out.println("No tables found 
under database[" + args[1] + "] with table filter [" + args[2] + "]") ;
-                               }
-                               else {
-                                       for(String str : tableList) {
-                                               System.out.println("Table: " + 
str) ;
-                                       }
-                               }
-                       }
-                       else if (args.length == 4) {
-                               List<String> columnList = 
hc.getColumnList(args[1], args[2], args[3]) ;
-                               if (columnList.size() == 0) {
-                                       System.out.println("No columns found 
for db:" + args[1] + ", table: [" + args[2] + "], with column filter [" + 
args[3] + "]") ;
-                               }
-                               else {
-                                       for (String str : columnList ) {
-                                               System.out.println("Column: " + 
str) ;
-                                       }
-                               }
-                       }
-                       
-               }
-               finally {
-                       if (hc != null) {
-                               hc.close();
-                       }
-               }       
-       }
-
-       public static HashMap<String, Object> testConnection(String dataSource,
-                       HashMap<String, String> connectionProperties) {
-
-               HashMap<String, Object> responseData = new HashMap<String, Object>();
-               boolean connectivityStatus = false;
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-
-               HiveClient connectionObj = new HiveClient(dataSource,
-                               connectionProperties);
-               if (connectionObj != null) {
-
-                       List<String> testResult = connectionObj.getDatabaseList("*");
-                       if (testResult != null && testResult.size() != 0) {
-                               connectivityStatus = true;
-                       }
-               }
-               if (connectivityStatus) {
-                       String successMsg = "TestConnection Successful";
-                       generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-                                       null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any databases using given parameters.";
-                       generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
-                                       null, null, responseData);
-               }
-               }
-               
-               connectionObj.close();
-               return responseData;
-       }
-       
-}
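
The initConnection() method deleted above is the stock java.sql bootstrap:
reflectively load the configured driver class, register it with
DriverManager, then open the connection with or without credentials. A
minimal, self-contained sketch of that pattern, assuming a Hive JDBC driver
on the classpath; the class name, driver class, and URL below are
illustrative placeholders, not values taken from the Ranger codebase:

import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;

public class JdbcBootstrapSketch {
    // Register the configured driver reflectively, then open a connection,
    // with credentials when supplied and without them otherwise -- the same
    // two-step flow as the initConnection() shown above.
    public static Connection connect(String driverClassName, String url,
                                     String user, String password) throws Exception {
        if (driverClassName != null) {
            // The removed code used Class.forName(...).newInstance(); going
            // through the explicit no-arg constructor avoids the deprecated
            // newInstance() on newer JDKs.
            Driver driver = (Driver) Class.forName(driverClassName)
                    .getDeclaredConstructor().newInstance();
            DriverManager.registerDriver(driver);
        }
        return (user == null && password == null)
                ? DriverManager.getConnection(url)
                : DriverManager.getConnection(url, user, password);
    }

    public static void main(String[] args) throws Exception {
        // Driver class and URL are illustrative placeholders only.
        try (Connection con = connect("org.apache.hive.jdbc.HiveDriver",
                "jdbc:hive2://localhost:10000/default", null, null)) {
            System.out.println("connected: " + !con.isClosed());
        }
    }
}

The deleted code wraps every failure in HadoopException via
generateResponseDataMap(); the sketch omits that reporting layer for brevity.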

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java b/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
deleted file mode 100644
index 0128622..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/hive/client/HiveClientTester.java
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.hive.client;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-public class HiveClientTester  {
-
-       public static void main(String[] args) throws Throwable {
-               
-               HiveClient hc = null ;
-               
-               if (args.length <= 2) {
-                       System.err.println("USAGE: java " + 
HiveClientTester.class.getName() + " dataSourceName propertyFile <databaseName> 
<tableName> <columnName>") ;
-                       System.exit(1) ;
-               }
-               
-               
-               try {
-                       
-                       Properties conf = new Properties() ;
-                       conf.load(HiveClientTester.class.getClassLoader().getResourceAsStream(args[1]));
-
-                       HashMap<String,String> prop = new HashMap<String,String>() ;
-                       for(Object key : conf.keySet()) {
-                               Object val = conf.get(key) ;
-                               prop.put((String)key, (String)val) ;
-                       }
-
-                       
-                       hc = new HiveClient(args[0], prop) ;
-                       
-                       
-                       if (args.length == 3) {
-                               List<String> dbList = hc.getDatabaseList(args[2]) ;
-                               if (dbList.size() == 0) {
-                                       System.out.println("No database found with db filter [" + args[2] + "]") ;
-                               }
-                               else {
-                                       for (String str : dbList ) {
-                                               System.out.println("database: " + str ) ;
-                                       }
-                               }
-                       }
-                       else if (args.length == 4) {
-                               List<String> tableList = hc.getTableList(args[2], args[3]) ;
-                               if (tableList.size() == 0) {
-                                       System.out.println("No tables found under database[" + args[2] + "] with table filter [" + args[3] + "]") ;
-                               }
-                               else {
-                                       for(String str : tableList) {
-                                               System.out.println("Table: " + str) ;
-                                       }
-                               }
-                       }
-                       else if (args.length == 5) {
-                               List<String> columnList = hc.getColumnList(args[2], args[3], args[4]) ;
-                               if (columnList.size() == 0) {
-                                       System.out.println("No columns found for db:" + args[2] + ", table: [" + args[3] + "], with column filter [" + args[4] + "]") ;
-                               }
-                               else {
-                                       for (String str : columnList ) {
-                                               System.out.println("Column: " + str) ;
-                                       }
-                               }
-                       }
-                       
-               }
-               finally {
-                       if (hc != null) {
-                               hc.close();
-                       }
-               }
-               
-       }
-       
-
-}
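
Beyond what HiveClient's own main() does, HiveClientTester's only extra step
is loading connection properties from a classpath resource into the
HashMap<String,String> the client constructor takes. A hardened sketch of
that step under the same assumption; the class and resource names below are
illustrative placeholders (the deleted tester skipped the null check and
would fail with a raw NullPointerException when the resource was missing):

import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class PropertyLoadSketch {
    // Load a properties file from the classpath and copy it into the
    // HashMap<String,String> form the HiveClient constructor expects.
    public static Map<String, String> load(String resourceName) throws Exception {
        Properties conf = new Properties();
        try (InputStream in = PropertyLoadSketch.class.getClassLoader()
                .getResourceAsStream(resourceName)) {
            // getResourceAsStream() returns null rather than throwing when
            // the resource is absent, so fail with a clear message here.
            if (in == null) {
                throw new IllegalArgumentException("resource not found: " + resourceName);
            }
            conf.load(in);
        }
        Map<String, String> prop = new HashMap<>();
        for (String key : conf.stringPropertyNames()) {
            prop.put(key, conf.getProperty(key));
        }
        return prop;
    }

    public static void main(String[] args) throws Exception {
        // Resource name is an illustrative placeholder.
        System.out.println(load("hiveclient.properties"));
    }
}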
