Repository: incubator-ranger
Updated Branches:
  refs/heads/master 073577dd7 -> 46895de3a


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java
deleted file mode 100644
index 9137706..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClient.java
+++ /dev/null
@@ -1,386 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.knox.client;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.ranger.hadoop.client.config.BaseClient;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
-
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.api.client.filter.HTTPBasicAuthFilter;
-
-public class KnoxClient {
-
-       private static final String EXPECTED_MIME_TYPE = "application/json";
-       private static final Log LOG = LogFactory.getLog(KnoxClient.class);
-
-       private String knoxUrl;
-       private String userName;
-       private String password;
-       
-       /*
-   Sample curl calls to Knox to discover topologies
-        curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
-        curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
-       */
-       
-       public KnoxClient(String knoxUrl, String userName, String password) {
-               LOG.debug("Constructed KnoxClient with knoxUrl: " + knoxUrl +
-                               ", userName: " + userName);
-               this.knoxUrl = knoxUrl;
-               this.userName = userName;
-               this.password = password;
-       }
-
-       public  List<String> getTopologyList(String topologyNameMatching) {
-               
-               // sample URI: https://hdp.example.com:8443/gateway/admin/api/v1/topologies
-               LOG.debug("Getting Knox topology list for topologyNameMatching : " +
-                               topologyNameMatching);
-               List<String> topologyList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               if ( topologyNameMatching == null ||  topologyNameMatching.trim().isEmpty()) {
-                       topologyNameMatching = "";
-               }
-               try {
-
-                       Client client = null;
-                       ClientResponse response = null;
-
-                       try {
-                               client = Client.create();
-                               
-                               client.addFilter(new HTTPBasicAuthFilter(userName, password));
-                               WebResource webResource = client.resource(knoxUrl);
-                               response = webResource.accept(EXPECTED_MIME_TYPE)
-                                           .get(ClientResponse.class);
-                               LOG.debug("Knox topology list response: " + response);
-                               if (response != null) {
-
-                                       if (response.getStatus() == 200) {
-                                               String jsonString = response.getEntity(String.class);
-                                               LOG.debug("Knox topology list response JSON string: "+ jsonString);
-                                               
-                                               ObjectMapper objectMapper = new ObjectMapper();
-                                               
-                                               JsonNode rootNode = objectMapper.readTree(jsonString);
-                                               JsonNode topologyNode = rootNode.findValue("topology");
-                                               if (topologyNode == null) {
-                                                       return topologyList;
-                                               }
-                                               Iterator<JsonNode> elements = topologyNode.getElements();
-                                               while (elements.hasNext()) {
-                                                       JsonNode element = elements.next();
-                                                       if (element != null) {
-                                                               String topologyName = element.get("name").getValueAsText();
-                                                               LOG.debug("Found Knox topologyName: " + topologyName);
-                                                               if (topologyName != null && topologyName.startsWith(topologyNameMatching)) {
-                                                                       topologyList.add(topologyName);
-                                                               }
-                                                       }
-                                               }
-                                       } else {
-                                               LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
-                                       }
-
-                               } else {
-                                       String msgDesc = "Unable to get a valid response for "
-                                                       + "getTopologyList() call for KnoxUrl : [" + knoxUrl
-                                                       + "] - got null response.";
-                                       LOG.error(msgDesc);
-                                       HadoopException hdpException = new HadoopException(msgDesc);
-                                       hdpException.generateResponseDataMap(false, msgDesc,
-                                                       msgDesc + errMsg, null, null);
-                                       throw hdpException;
-                               }
-
-                       } finally {
-                               if (response != null) {
-                                       response.close();
-                               }
-                               if (client != null) {
-                                       client.destroy();
-                               }
-                       }
-               } catch (HadoopException he) {
-                       throw he;
-               } catch (Throwable t) {
-                       String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
-                       HadoopException hdpException = new HadoopException(msgDesc, t);
-                       LOG.error(msgDesc, t);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(t), msgDesc + errMsg, null, null);
-                       throw hdpException;
-               }
-               return topologyList;
-       }
-
-       
-       public List<String> getServiceList(String topologyName, String serviceNameMatching) {
-               
-               // sample URI: .../admin/api/v1/topologies/<topologyName>
-               
-               List<String> serviceList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               if ( serviceNameMatching == null ||  serviceNameMatching.trim().isEmpty()) {
-                       serviceNameMatching = "";
-               }
-               try {
-
-                       Client client = null;
-                       ClientResponse response = null;
-
-                       try {
-                               client = Client.create();
-                               
-                               client.addFilter(new HTTPBasicAuthFilter(userName, password));
-                               
-                               WebResource webResource = client.resource(knoxUrl + "/" + topologyName);
-                               
-                               response = webResource.accept(EXPECTED_MIME_TYPE)
-                                           .get(ClientResponse.class);
-                               LOG.debug("Knox service lookup response: " + response);
-                               if (response != null) {
-                                       
-                                       if (response.getStatus() == 200) {
-                                               String jsonString = response.getEntity(String.class);
-                                               LOG.debug("Knox service look up response JSON string: " + jsonString);
-                                               
-                                               ObjectMapper objectMapper = new ObjectMapper();
-                                               
-                                               JsonNode rootNode = objectMapper.readTree(jsonString);
-                                               JsonNode topologyNode = rootNode.findValue("topology");
-                                               if (topologyNode != null) {
-                                                       JsonNode servicesNode = topologyNode.get("services");
-                                                       Iterator<JsonNode> services = servicesNode.getElements();
-                                                       while (services.hasNext()) {
-                                                               JsonNode service = services.next();
-                                                               String serviceName = service.get("role").getValueAsText();
-                                                               LOG.debug("Knox serviceName: " + serviceName);
-                                                               if (serviceName != null && serviceName.startsWith(serviceNameMatching)) {
-                                                                       serviceList.add(serviceName);
-                                                               }
-                                                       }
-                                               }
-                                       } else {
-                                               LOG.error("Got invalid REST response from: " + knoxUrl + ", responseStatus: " + response.getStatus());
-                                       }
-
-                               } else {
-                                       String msgDesc = "Unable to get a valid response for "
-                                                       + "getServiceList() call for KnoxUrl : [" + knoxUrl
-                                                       + "] - got null response.";
-                                       LOG.error(msgDesc);
-                                       HadoopException hdpException = new HadoopException(msgDesc);
-                                       hdpException.generateResponseDataMap(false, msgDesc,
-                                                       msgDesc + errMsg, null, null);
-                                       throw hdpException;
-                               }
-
-                       } finally {
-                               if (response != null) {
-                                       response.close();
-                               }
-                               if (client != null) {
-                                       client.destroy();
-                               }
-                       }
-               } catch (HadoopException he) {
-                       throw he;
-               } catch (Throwable t) {
-                       String msgDesc = "Exception on REST call to KnoxUrl : " + knoxUrl + ".";
-                       HadoopException hdpException = new HadoopException(msgDesc, t);
-                       LOG.error(msgDesc, t);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(t), msgDesc + errMsg, null, null);
-                       throw hdpException;
-
-               }
-               return serviceList;
-       }
-
-       public static void main(String[] args) {
-
-               KnoxClient knoxClient = null;
-
-               if (args.length != 3) {
-                       System.err.println("USAGE: java " + KnoxClient.class.getName()
-                                       + " knoxUrl userName password [sslConfigFileName]");
-                       System.exit(1);
-               }
-
-               knoxClient = new KnoxClient(args[0], args[1], args[2]);
-               List<String> topologyList = knoxClient.getTopologyList("");
-               if ((topologyList == null) || topologyList.isEmpty()) {
-                       System.out.println("No knox topologies found");
-               } else {
-                       for (String topology : topologyList) {
-                               System.out.println("Found Topology: " + topology);
-                               List<String> serviceList = knoxClient.getServiceList(topology, "");
-                               if ((serviceList == null) || serviceList.isEmpty()) {
-                                       System.out.println("No services found for knox topology: " + topology);
-                               } else {
-                                       for (String service : serviceList) {
-                                               System.out.println("    Found service for topology: " + service + ", " + topology);
-                                       }
-                               }
-                       }
-               }
-       }
-       
-       public static HashMap<String, Object> testConnection(String dataSource,
-                       HashMap<String, String> connectionProperties) {
-
-               List<String> strList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               boolean connectivityStatus = false;
-               HashMap<String, Object> responseData = new HashMap<String, Object>();
-
-               KnoxClient knoxClient = getKnoxClient(dataSource, connectionProperties);
-               strList = getKnoxResources(knoxClient, "", null);
-
-               if (strList != null && (strList.size() != 0)) {
-                       connectivityStatus = true;
-               }
-               
-               if (connectivityStatus) {
-                       String successMsg = "TestConnection Successful";
-                       BaseClient.generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-                                       null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any topologies/services using given parameters.";
-                       BaseClient.generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
-                                       null, null, responseData);
-               }
-               
-               return responseData;
-       }
-
-       public static KnoxClient getKnoxClient(String dataSourceName,
-                       Map<String, String> configMap) {
-               KnoxClient knoxClient = null;
-               LOG.debug("Getting knoxClient for datasource: " + dataSourceName
-                               + ", configMap: " + configMap);
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               if (configMap == null || configMap.isEmpty()) {
-                       String msgDesc = "Could not connect as Connection ConfigMap is empty.";
-                       LOG.error(msgDesc);
-                       HadoopException hdpException = new HadoopException(msgDesc);
-                       hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg, null,
-                                       null);
-                       throw hdpException;
-               } else {
-                       String knoxUrl = configMap.get("knox.url");
-                       String knoxAdminUser = configMap.get("username");
-                       String knoxAdminPassword = configMap.get("password");
-                       knoxClient = new KnoxClient(knoxUrl, knoxAdminUser,
-                                       knoxAdminPassword);
-               }
-               return knoxClient;
-       }
-
-       public static List<String> getKnoxResources(final KnoxClient knoxClient,
-                       String topologyName, String serviceName) {
-
-               List<String> resultList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-
-               try {
-                       if (knoxClient == null) {
-                               // LOG.error("Unable to get knox resources: knoxClient is null");
-                               // return new ArrayList<String>();
-                               String msgDesc = "Unable to get knox resources: knoxClient is null.";
-                               LOG.error(msgDesc);
-                               HadoopException hdpException = new HadoopException(msgDesc);
-                               hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
-                                               null, null);
-                               throw hdpException;
-                       }
-
-                       final Callable<List<String>> callableObj;
-                       if (serviceName != null) {
-                               final String finalServiceNameMatching = serviceName.trim();
-                               final String finalTopologyName = topologyName;
-                               callableObj = new Callable<List<String>>() {
-                                       @Override
-                                       public List<String> call() {
-                                               return knoxClient.getServiceList(finalTopologyName,
-                                                               finalServiceNameMatching);
-                                       }
-                               };
-
-                       } else {
-                               final String finalTopologyNameMatching = (topologyName == null) ? ""
-                                               : topologyName.trim();
-                               callableObj = new Callable<List<String>>() {
-                                       @Override
-                                       public List<String> call() {
-                                               return knoxClient
-                                                               .getTopologyList(finalTopologyNameMatching);
-                                       }
-                               };
-                       }
-                       resultList = timedTask(callableObj, 5, TimeUnit.SECONDS);
-
-               } catch (HadoopException he) {
-                       throw he;
-               } catch (Exception e) {
-                       String msgDesc = "Unable to get knox resources.";
-                       LOG.error(msgDesc, e);
-                       HadoopException hdpException = new HadoopException(msgDesc);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(e), msgDesc + errMsg, null, null);
-                       throw hdpException;
-               }
-
-               return resultList;
-       }
-
-       public static <T> T timedTask(Callable<T> callableObj, long timeout,
-                       TimeUnit timeUnit) throws Exception {
-               return callableObj.call();
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
deleted file mode 100644
index dd6acbc..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/knox/client/KnoxClientTest.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.knox.client;
-
-public class KnoxClientTest  {
-       
-       
-       /*
-   Sample curl calls to knox REST API to discover topologies
-        curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies
-        curl -ivk -u admin:admin-password https://localhost:8443/gateway/admin/api/v1/topologies/admin
-       */
-       
-       public static void main(String[] args) {
-               System.out.println(System.getProperty("java.class.path"));
-               System.setProperty("javax.net.ssl.trustStore", "/tmp/cacertswithknox");
-               String[] testArgs = {
-                               "https://localhost:8443/gateway/admin/api/v1/topologies",
-                               "admin",
-                               "admin-password"
-                               };
-               KnoxClient.main(testArgs);
-       }
-       
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java b/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
deleted file mode 100644
index 763ce75..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/knox/client/RangerConstants.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ranger.knox.client;
-
-public class RangerConstants {
-       
-       // xasecure 2-way ssl configuration 
-
-       public static final String RANGER_KNOX_CLIENT_KEY_FILE                         = "xasecure.knoxclient.ssl.keystore";
-       public static final String RANGER_KNOX_CLIENT_KEY_FILE_PASSWORD                = "xasecure.knoxclient.ssl.keystore.password";
-       public static final String RANGER_KNOX_CLIENT_KEY_FILE_TYPE                    = "xasecure.knoxclient.ssl.keystore.type";
-
-       public static final String RANGER_KNOX_CLIENT_KEY_FILE_TYPE_DEFAULT            = "jks";
-
-       public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE                  = "xasecure.knoxclient.ssl.truststore";
-       public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_PASSWORD         = "xasecure.knoxclient.ssl.truststore.password";
-       public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE             = "xasecure.knoxclient.ssl.truststore.type";
-
-       public static final String RANGER_KNOX_CLIENT_TRUSTSTORE_FILE_TYPE_DEFAULT     = "jks";
-
-
-       public static final String RANGER_SSL_KEYMANAGER_ALGO_TYPE                     = "SunX509" ;
-       public static final String RANGER_SSL_TRUSTMANAGER_ALGO_TYPE                   = "SunX509" ;
-       public static final String RANGER_SSL_CONTEXT_ALGO_TYPE                        = "SSL" ;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
deleted file mode 100644
index db736b0..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/storm/client/StormClient.java
+++ /dev/null
@@ -1,395 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.storm.client;
-
-import java.io.IOException;
-import java.security.PrivilegedAction;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import javax.security.auth.login.LoginContext;
-import javax.security.auth.login.LoginException;
-
-import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.security.KrbPasswordSaverLoginModule;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.log4j.Logger;
-import org.apache.ranger.hadoop.client.config.BaseClient;
-import org.apache.ranger.hadoop.client.exceptions.HadoopException;
-import org.apache.ranger.storm.client.json.model.Topology;
-import org.apache.ranger.storm.client.json.model.TopologyListResponse;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-
-public class StormClient {
-       
-       public static final Logger LOG = Logger.getLogger(StormClient.class) ;
-
-       private static final String EXPECTED_MIME_TYPE = "application/json";
-       
-       private static final String TOPOLOGY_LIST_API_ENDPOINT = "/api/v1/topology/summary" ;
-       
-
-       String stormUIUrl;
-       String userName;
-       String password;
-
-       public StormClient(String aStormUIUrl, String aUserName, String aPassword) {
-               
-               this.stormUIUrl = aStormUIUrl;
-               this.userName = aUserName ;
-               this.password = aPassword;
-               
-               if (LOG.isDebugEnabled()) {
-                       LOG.debug("Storm Client is built with url [" + aStormUIUrl + "] user: [" + aUserName + "], password: [" + "" + "]");
-               }
-
-       }
-
-       public List<String> getTopologyList(final String topologyNameMatching) {
-               
-               LOG.debug("Getting Storm topology list for topologyNameMatching : " +
-                               topologyNameMatching);
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               
-               List<String> ret = new ArrayList<String>();
-               
-               PrivilegedAction<ArrayList<String>> topologyListGetter = new PrivilegedAction<ArrayList<String>>() {
-                       @Override
-                       public ArrayList<String> run() {
-                               
-                               ArrayList<String> lret = new ArrayList<String>();
-                               
-                               String url = stormUIUrl + TOPOLOGY_LIST_API_ENDPOINT ;
-                               
-                               Client client = null ;
-                               ClientResponse response = null ;
-                               
-                               try {
-                                       client = Client.create() ;
-                                       
-                                       WebResource webResource = client.resource(url);
-                                       
-                                       response = webResource.accept(EXPECTED_MIME_TYPE)
-                                                   .get(ClientResponse.class);
-                                       
-                                       LOG.debug("getTopologyList():calling " + url);
-                                       
-                                       if (response != null) {
-                                               LOG.debug("getTopologyList():response.getStatus()= " + response.getStatus());
-                                               if (response.getStatus() == 200) {
-                                                       String jsonString = response.getEntity(String.class);
-                                                       Gson gson = new GsonBuilder().setPrettyPrinting().create();
-                                                       TopologyListResponse topologyListResponse = gson.fromJson(jsonString, TopologyListResponse.class);
-                                                       if (topologyListResponse != null) {
-                                                               if (topologyListResponse.getTopologyList() != null) {
-                                                                       for (Topology topology : topologyListResponse.getTopologyList()) {
-                                                                               String topologyName = topology.getName() ;
-                                                                               LOG.debug("getTopologyList():Found topology " + topologyName);
-                                                                               LOG.debug("getTopologyList():topology Name=[" + topology.getName() + "], topologyNameMatching=[" + topologyNameMatching + "]");
-                                                                               if (topologyName != null) {
-                                                                                       if (topologyNameMatching == null || topologyNameMatching.isEmpty() || FilenameUtils.wildcardMatch(topology.getName(), topologyNameMatching + "*")) {
-                                                                                               LOG.debug("getTopologyList():Adding topology " + topologyName);
-                                                                                               lret.add(topologyName) ;
-                                                                                       }
-                                                                               }
-                                                                       }
-                                                               }
-                                                       }
-                                               } else {
-                                                       LOG.info("getTopologyList():response.getStatus()= " + response.getStatus() + " for URL " + url + ", so returning null list");
-                                                       String jsonString = response.getEntity(String.class);
-                                                       LOG.info(jsonString);
-                                                       lret = null;
-                                               }
-                                       } else {
-                                               String msgDesc = "Unable to get a valid response for "
-                                                               + "expected mime type : [" + EXPECTED_MIME_TYPE
-                                                               + "] URL : " + url + " - got null response.";
-                                               LOG.error(msgDesc);
-                                               HadoopException hdpException = new HadoopException(msgDesc);
-                                               hdpException.generateResponseDataMap(false, msgDesc,
-                                                               msgDesc + errMsg, null, null);
-                                               throw hdpException;
-                                       }
-                               } catch (HadoopException he) {
-                                       throw he;
-                               } catch (Throwable t) {
-                                       String msgDesc = "Exception while getting Storm TopologyList."
-                                                       + " URL : " + url;
-                                       HadoopException hdpException = new HadoopException(msgDesc,
-                                                       t);
-                                       LOG.error(msgDesc, t);
-
-                                       hdpException.generateResponseDataMap(false,
-                                                       BaseClient.getMessage(t), msgDesc + errMsg, null,
-                                                       null);
-                                       throw hdpException;
-                                       
-                               } finally {
-                                       if (response != null) {
-                                               response.close();
-                                       }
-                                       
-                                       if (client != null) {
-                                               client.destroy(); 
-                                       }
-                               
-                               }
-                               return lret ;
-                       }
-               } ;
-               
-               try {
-                       ret = executeUnderKerberos(this.userName, this.password, topologyListGetter) ;
-               } catch (IOException e) {
-                       LOG.error("Unable to get Topology list from [" + stormUIUrl + "]", e) ;
-               }
-               
-               return ret;
-       }
-       
-       public static <T> T executeUnderKerberos(String userName, String password,
-                       PrivilegedAction<T> action) throws IOException {
-               
-               final String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               class MySecureClientLoginConfiguration extends
-                               javax.security.auth.login.Configuration {
-
-                       private String userName;
-                       private String password ;
-
-                       MySecureClientLoginConfiguration(String aUserName,
-                                       String password) {
-                               this.userName = aUserName;
-                               this.password = password;
-                       }
-
-                       @Override
-                       public AppConfigurationEntry[] getAppConfigurationEntry(
-                                       String appName) {
-
-                               Map<String, String> kerberosOptions = new HashMap<String, String>();
-                               kerberosOptions.put("principal", this.userName);
-                               kerberosOptions.put("debug", "true");
-                               kerberosOptions.put("useKeyTab", "false");
-                               kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, this.userName);
-                               kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, this.password);
-                               kerberosOptions.put("doNotPrompt", "false");
-                               kerberosOptions.put("useFirstPass", "true");
-                               kerberosOptions.put("tryFirstPass", "false");
-                               kerberosOptions.put("storeKey", "true");
-                               kerberosOptions.put("refreshKrb5Config", "true");
-
-                               AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = null;
-                               AppConfigurationEntry KERBEROS_PWD_SAVER = null;
-                               try {
-                                       KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
-                                                       KerberosUtil.getKrb5LoginModuleName(),
-                                                       AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
-                                                       kerberosOptions);
-                                       KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
-
-                               } catch (IllegalArgumentException e) {
-                                       String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
-                                       HadoopException hdpException = new HadoopException(msgDesc,
-                                                       e);
-                                       LOG.error(msgDesc, e);
-
-                                       hdpException.generateResponseDataMap(false,
-                                                       BaseClient.getMessage(e), msgDesc + errMsg, null,
-                                                       null);
-                                       throw hdpException;
-                               }
-
-                               LOG.debug("getAppConfigurationEntry():" + kerberosOptions.get("principal"));
-                               
-                               return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
-                       }
-
-               };
-
-               T ret = null;
-
-               Subject subject = null;
-               LoginContext loginContext = null;
-
-               try {
-                       subject = new Subject();
-                       LOG.debug("executeUnderKerberos():user=" + userName + ",pass=");
-                       LOG.debug("executeUnderKerberos():Creating config..");
-                       MySecureClientLoginConfiguration loginConf = new MySecureClientLoginConfiguration(
-                                       userName, password);
-                       LOG.debug("executeUnderKerberos():Creating Context..");
-                       loginContext = new LoginContext("hadoop-keytab-kerberos", subject,
-                                       null, loginConf);
-                       
-                       LOG.debug("executeUnderKerberos():Logging in..");
-                       loginContext.login();
-
-                       Subject loginSubj = loginContext.getSubject();
-
-                       if (loginSubj != null) {
-                               ret = Subject.doAs(loginSubj, action);
-                       }
-               } catch (LoginException le) {
-                       String msgDesc = "executeUnderKerberos: Login failure using given"
-                                       + " configuration parameters, username : `" + userName + "`.";
-                       HadoopException hdpException = new HadoopException(msgDesc, le);
-                       LOG.error(msgDesc, le);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(le), msgDesc + errMsg, null, null);
-                       throw hdpException;
-               } catch (SecurityException se) {
-                       String msgDesc = "executeUnderKerberos: Exception while getting Storm TopologyList.";
-                       HadoopException hdpException = new HadoopException(msgDesc, se);
-                       LOG.error(msgDesc, se);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(se), msgDesc + errMsg, null, null);
-                       throw hdpException;
-
-               } finally {
-                       if (loginContext != null) {
-                               if (subject != null) {
-                                       try {
-                                               loginContext.logout();
-                                       } catch (LoginException e) {
-                                               throw new IOException("logout failure", e);
-                                       }
-                               }
-                       }
-               }
-
-               return ret;
-       }
-
-       public static HashMap<String, Object> testConnection(String dataSource,
-                       HashMap<String, String> connectionProperties) {
-
-               List<String> strList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               boolean connectivityStatus = false;
-               HashMap<String, Object> responseData = new HashMap<String, Object>();
-
-               StormClient stormClient = getStormClient(dataSource,
-                               connectionProperties);
-               strList = getStormResources(stormClient, "");
-
-               if (strList != null) {
-                       connectivityStatus = true;
-               }
-
-               if (connectivityStatus) {
-                       String successMsg = "TestConnection Successful";
-                       BaseClient.generateResponseDataMap(connectivityStatus, successMsg,
-                                       successMsg, null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any topologies using given parameters.";
-                       BaseClient.generateResponseDataMap(connectivityStatus, failureMsg,
-                                       failureMsg + errMsg, null, null, responseData);
-               }
-
-               return responseData;
-       }
-
-       public static StormClient getStormClient(String dataSourceName,
-                       Map<String, String> configMap) {
-               StormClient stormClient = null;
-               LOG.debug("Getting StormClient for datasource: " + dataSourceName
-                               + ", configMap: " + configMap);
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-               if (configMap == null || configMap.isEmpty()) {
-                       String msgDesc = "Could not connect as Connection ConfigMap is empty.";
-                       LOG.error(msgDesc);
-                       HadoopException hdpException = new HadoopException(msgDesc);
-                       hdpException.generateResponseDataMap(false, msgDesc, msgDesc
-                                       + errMsg, null, null);
-                       throw hdpException;
-               } else {
-                       String stormUrl = configMap.get("nimbus.url");
-                       String stormAdminUser = configMap.get("username");
-                       String stormAdminPassword = configMap.get("password");
-                       stormClient = new StormClient(stormUrl, stormAdminUser,
-                                       stormAdminPassword);
-               }
-               return stormClient;
-       }
-
-       public static List<String> getStormResources(final StormClient stormClient,
-                       String topologyName) {
-
-               List<String> resultList = new ArrayList<String>();
-               String errMsg = " You can still save the repository and start creating "
-                               + "policies, but you would not be able to use autocomplete for "
-                               + "resource names. Check xa_portal.log for more info.";
-
-               try {
-                       if (stormClient == null) {
-                               String msgDesc = "Unable to get Storm resources: StormClient is null.";
-                               LOG.error(msgDesc);
-                               HadoopException hdpException = new HadoopException(msgDesc);
-                               hdpException.generateResponseDataMap(false, msgDesc, msgDesc
-                                               + errMsg, null, null);
-                               throw hdpException;
-                       }
-
-                       if (topologyName != null) {
-                               String finalTopologyNameMatching = (topologyName == null) ? ""
-                                               : topologyName.trim();
-                               resultList = stormClient
-                                               .getTopologyList(finalTopologyNameMatching);
-                               if (resultList != null) {
-                                       LOG.debug("Returning list of " + resultList.size() + " topologies");
-                               }
-                       }
-               } catch (HadoopException he) {
-                       throw he;
-               } catch (Exception e) {
-                       String msgDesc = "getStormResources: Unable to get Storm resources.";
-                       LOG.error(msgDesc, e);
-                       HadoopException hdpException = new HadoopException(msgDesc);
-
-                       hdpException.generateResponseDataMap(false,
-                                       BaseClient.getMessage(e), msgDesc + errMsg, null, null);
-                       throw hdpException;
-               }
-               return resultList;
-       }
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java
deleted file mode 100644
index 4abf1e7..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/Topology.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.storm.client.json.model;
-
-public class Topology {
-       private String id ;
-       private String name ;
-       private String status ;
-       
-       public String getId() {
-               return id;
-       }
-       public void setId(String id) {
-               this.id = id;
-       }
-       public String getName() {
-               return name;
-       }
-       public void setName(String name) {
-               this.name = name;
-       }
-       public String getStatus() {
-               return status;
-       }
-       public void setStatus(String status) {
-               this.status = status;
-       }
-       
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java
----------------------------------------------------------------------
diff --git a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java b/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java
deleted file mode 100644
index 14d641d..0000000
--- a/lookup-client/src/main/java/org/apache/ranger/storm/client/json/model/TopologyListResponse.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.storm.client.json.model;
-
-import java.util.List;
-
-import com.google.gson.annotations.SerializedName;
-
-public class TopologyListResponse {
-       @SerializedName("topologies")
-       private List<Topology>  topologyList;
-
-       public List<Topology> getTopologyList() {
-               return topologyList;
-       }
-
-       public void setTopologyList(List<Topology> topologyList) {
-               this.topologyList = topologyList;
-       }
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/log4j.xml
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/log4j.xml b/lookup-client/src/scripts/log4j.xml
deleted file mode 100644
index 9ee3643..0000000
--- a/lookup-client/src/scripts/log4j.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/" debug="true">
-
-   <appender name="logFile" class="org.apache.log4j.DailyRollingFileAppender">
-        <param name="file" value="audit-test.log" />
-        <param name="DatePattern" value="'.'yyyy-MM-dd" />
-        <layout class="org.apache.log4j.PatternLayout">
-               <param name="ConversionPattern" value="%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n"/>
-        </layout>
-   </appender>
-
-  <appender name="console" class="org.apache.log4j.ConsoleAppender"> 
-    <param name="Target" value="System.out"/> 
-    <layout class="org.apache.log4j.PatternLayout"> 
-       <param name="ConversionPattern" value="%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n"/>
-    </layout> 
-  </appender> 
-
-  <root> 
-    <priority value ="info" /> 
-    <appender-ref ref="console" /> 
-    <appender-ref ref="logFile" /> 
-  </root>
-  
-</log4j:configuration>

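For context, a minimal sketch of code that would log through the configuration removed above, using the log4j 1.x API. The class name, message text, and the config path passed to DOMConfigurator are placeholders, not part of this change.

    import org.apache.log4j.Logger;
    import org.apache.log4j.xml.DOMConfigurator;

    public class AuditLogSmokeTest {
        private static final Logger LOG = Logger.getLogger(AuditLogSmokeTest.class);

        public static void main(String[] args) {
            // Load the XML configuration; the path is an assumption for illustration.
            DOMConfigurator.configure("log4j.xml");
            LOG.info("written to the console and to audit-test.log (info and above)");
            LOG.debug("dropped: below the configured root priority of info");
        }
    }
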
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/run-audit-test.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-audit-test.sh b/lookup-client/src/scripts/run-audit-test.sh
deleted file mode 100755
index a9d650d..0000000
--- a/lookup-client/src/scripts/run-audit-test.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-HADOOP_DIR=/usr/hdp/current/hadoop-client
-HADOOP_LIB_DIR=/usr/hdp/current/hadoop-client/lib
-HADOOP_CONF_DIR=/etc/hadoop/conf
-
-cp=./ranger-plugins-audit-0.4.0.jar
-for jar in $HADOOP_CONF_DIR $HADOOP_LIB_DIR/commons-logging-1.1.3.jar $HADOOP_LIB_DIR/log4j-1.2.17.jar $HADOOP_LIB_DIR/eclipselink-2.5.2-M1.jar $HADOOP_LIB_DIR/gson-2.2.4.jar $HADOOP_LIB_DIR/javax.persistence-2.1.0.jar $HADOOP_LIB_DIR/mysql-connector-java.jar $HADOOP_DIR/hadoop-common.jar
-do
-  cp=${cp}:${jar}
-done
-
-export cp
-
-java -Xmx1024M -Xms1024M -cp "${cp}" org.apache.ranger.audit.test.TestEvents $*

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/run-hadoop-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hadoop-client.sh b/lookup-client/src/scripts/run-hadoop-client.sh
deleted file mode 100644
index 03aff46..0000000
--- a/lookup-client/src/scripts/run-hadoop-client.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-case $# in
-4 )
-    java -cp "./dist/*:./lib/hadoop/*:./conf:." org.apache.ranger.hadoop.client.HadoopFSTester  "${1}" "${2}" "${3}" "${4}" ;;
-* )
-    java -cp "./dist/*:./lib/hadoop/*:./conf:." org.apache.ranger.hadoop.client.HadoopFSTester   ;;
-esac
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/run-hbase-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hbase-client.sh b/lookup-client/src/scripts/run-hbase-client.sh
deleted file mode 100644
index 1c9cdb4..0000000
--- a/lookup-client/src/scripts/run-hbase-client.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-cp="./dist/*:./conf/:.:./lib/hadoop/*:./lib/hive/*:./lib/hbase/*"
-
-case $# in
-2 )
-java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester  "${1}" "${2}" ;;
-3 )
-java  ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester  "${1}" "${2}" "${3}" ;;
-4 )
-java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester  "${1}" "${2}" "${3}" "${4}" ;;
-* )
-java ${JOPTS} -cp "${cp}" org.apache.ranger.hbase.client.HBaseClientTester;;
-esac
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/run-hive-client.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-hive-client.sh b/lookup-client/src/scripts/run-hive-client.sh
deleted file mode 100644
index 4e80d10..0000000
--- a/lookup-client/src/scripts/run-hive-client.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-case $# in
-2 )
-       java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:."  org.apache.ranger.hive.client.HiveClientTester "$1" "${2}"  ;;
-3 )
-       java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:."  org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" ;;
-4 )
-       java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:."  org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" ;;
-5 )
-       java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:."  org.apache.ranger.hive.client.HiveClientTester "$1" "${2}" "${3}" "${4}" "${5}" ;;
-* )
-       java -cp "./dist/*:./lib/hadoop/*:./lib/hive/*:./conf:."  org.apache.ranger.hive.client.HiveClientTester  ;;
-esac

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/46895de3/lookup-client/src/scripts/xasecure-audit.properties
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/xasecure-audit.properties b/lookup-client/src/scripts/xasecure-audit.properties
deleted file mode 100644
index 055fd69..0000000
--- a/lookup-client/src/scripts/xasecure-audit.properties
+++ /dev/null
@@ -1,55 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-xasecure.audit.test.event.count=1000
-xasecure.audit.test.event.pause.time.ms=100
-xasecure.audit.test.sleep.time.before.exit.seconds=60
-
-
-xasecure.audit.is.enabled=true
-
-
-xasecure.audit.log4j.is.enabled=false
-xasecure.audit.log4j.is.async=false
-xasecure.audit.log4j.async.max.queue.size=100000
-xasecure.audit.log4j.async.max.flush.interval.ms=30000
-
-
-xasecure.audit.db.is.enabled=false
-xasecure.audit.db.is.async=true
-xasecure.audit.db.async.max.queue.size=102400
-xasecure.audit.db.async.max.flush.interval.ms=30000
-xasecure.audit.db.batch.size=100
-xasecure.audit.jpa.javax.persistence.jdbc.url=jdbc:mysql://localhost:3306/xasecure
-xasecure.audit.jpa.javax.persistence.jdbc.user=xalogger
-xasecure.audit.jpa.javax.persistence.jdbc.password=xalogger
-xasecure.audit.jpa.javax.persistence.jdbc.driver=com.mysql.jdbc.Driver
-
-
-xasecure.audit.hdfs.is.enabled=false
-xasecure.audit.hdfs.is.async=true
-xasecure.audit.hdfs.async.max.queue.size=102400
-xasecure.audit.hdfs.async.max.flush.interval.ms=30000
-xasecure.audit.hdfs.config.destination.directory=hdfs://%hostname%:8020/tmp/audit/hdfs/%time:yyyyMMdd%
-xasecure.audit.hdfs.config.destination.file=%hostname%-audit.log
-xasecure.audit.hdfs.config.destination.flush.interval.seconds=900
-xasecure.audit.hdfs.config.destination.rollover.interval.seconds=86400
-xasecure.audit.hdfs.config.destination.open.retry.interval.seconds=60
-xasecure.audit.hdfs.config.local.buffer.directory=/tmp/audit/hdfs
-xasecure.audit.hdfs.config.local.buffer.file=%time:yyyyMMdd-HHmm.ss%.log
-xasecure.audit.hdfs.config.local.buffer.flush.interval.seconds=60
-xasecure.audit.hdfs.config.local.buffer.rollover.interval.seconds=600
-xasecure.audit.hdfs.config.local.archive.directory=/tmp/audit/archive/hdfs
-xasecure.audit.hdfs.config.local.archive.max.file.count=10

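For illustration, a minimal sketch of reading a couple of the flags above with java.util.Properties. The audit framework has its own configuration loader, so this is not how the plugin consumes the file; the file path and the chosen keys are assumptions for the example.

    import java.io.FileInputStream;
    import java.util.Properties;

    public class AuditPropsCheck {
        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // Path is an assumption; in the removed layout the file sat next to the test scripts.
            try (FileInputStream in = new FileInputStream("xasecure-audit.properties")) {
                props.load(in);
            }
            boolean auditEnabled = Boolean.parseBoolean(props.getProperty("xasecure.audit.is.enabled", "false"));
            int dbBatchSize = Integer.parseInt(props.getProperty("xasecure.audit.db.batch.size", "1"));
            System.out.println("audit enabled=" + auditEnabled + ", db batch size=" + dbBatchSize);
        }
    }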