Repository: incubator-ranger
Updated Branches:
  refs/heads/master 1fe2d3ec6 -> 3a21f7449


RANGER-878: Improve error logging and Ranger UI error messages when test connection and lookup are performed


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/3a21f744
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/3a21f744
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/3a21f744

Branch: refs/heads/master
Commit: 3a21f74494ad253afb7919939491258a52a4d655
Parents: 1fe2d3e
Author: rmani <rm...@hortonworks.com>
Authored: Mon Mar 14 15:22:56 2016 -0700
Committer: rmani <rm...@hortonworks.com>
Committed: Mon Mar 14 15:22:56 2016 -0700

----------------------------------------------------------------------
 .../apache/ranger/plugin/client/BaseClient.java |   6 +-
 .../ranger/plugin/client/HadoopException.java   |  25 ++
 .../services/hbase/RangerServiceHBase.java      |   3 +-
 .../services/hbase/client/HBaseClient.java      |  29 ++-
 .../services/hbase/client/HBaseResourceMgr.java |   3 +-
 .../ranger/services/hdfs/RangerServiceHdfs.java |   5 +-
 .../ranger/services/hdfs/client/HdfsClient.java |  55 +++--
 .../services/hdfs/client/HdfsConnectionMgr.java |  13 +-
 .../services/hdfs/client/HdfsResourceMgr.java   |   9 +-
 .../ranger/services/hive/RangerServiceHive.java |   3 +-
 .../ranger/services/hive/client/HiveClient.java | 226 +++++++++++++------
 .../services/hive/client/HiveResourceMgr.java   |   4 +-
 .../ranger/services/knox/client/KnoxClient.java |   3 +-
 .../ranger/services/yarn/client/YarnClient.java |   3 +-
 .../java/org/apache/ranger/biz/ServiceMgr.java  |   2 +-
 .../org/apache/ranger/common/TimedExecutor.java |  15 +-
 .../services/storm/client/StormClient.java      |   3 +-
 17 files changed, 284 insertions(+), 123 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
index 0242caa..df69e2a 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
@@ -78,7 +78,7 @@ public abstract class BaseClient {
                                String msgDesc = "Unable to find login username 
for hadoop environment, ["
                                                + serviceName + "]";
                                HadoopException hdpException = new 
HadoopException(msgDesc);
-                               hdpException.generateResponseDataMap(false, 
msgDesc, msgDesc + errMsg,
+                               hdpException.generateResponseDataMap(false, 
msgDesc + errMsg, msgDesc + errMsg,
                                                null, null);
 
                                throw hdpException;
@@ -110,14 +110,14 @@ public abstract class BaseClient {
                                        + serviceName + "]";
 
                        HadoopException hdpException = new HadoopException(msgDesc, ioe);
-                       hdpException.generateResponseDataMap(false, getMessage(ioe),
+                       hdpException.generateResponseDataMap(false, getMessage(ioe) +  errMsg,
                                        msgDesc + errMsg, null, null);
                        throw hdpException;
                } catch (SecurityException se) {
                        String msgDesc = "Unable to login to Hadoop environment ["
                                        + serviceName + "]";
                        HadoopException hdpException = new HadoopException(msgDesc, se);
-                       hdpException.generateResponseDataMap(false, getMessage(se),
+                       hdpException.generateResponseDataMap(false, getMessage(se) +  errMsg,
                                        msgDesc + errMsg, null, null);
                        throw hdpException;
                } finally {
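
The fix above is narrow but user-visible: the second argument to generateResponseDataMap (the message the Ranger UI displays) previously carried only msgDesc or getMessage(...), while the guidance text in errMsg reached only the description field. Appending errMsg to the message argument puts the root cause and the "you can still save the repository" hint in front of the user together. A self-contained sketch of the idea; DemoHadoopException and its response-map keys are hypothetical stand-ins, not the real BaseClient API:

    import java.util.HashMap;

    // Hypothetical stand-in for org.apache.ranger.plugin.client.HadoopException;
    // the response-map keys are assumptions made for illustration only.
    class DemoHadoopException extends RuntimeException {
        final HashMap<String, Object> responseData = new HashMap<String, Object>();

        DemoHadoopException(String msg, Throwable cause) { super(msg, cause); }

        void generateResponseDataMap(boolean connectivityStatus, String message,
                String description, Object objectId, String fieldName) {
            responseData.put("connectivityStatus", connectivityStatus);
            responseData.put("message", message);         // surfaced in the Ranger UI
            responseData.put("description", description); // detailed text for the log
            responseData.put("objectId", objectId);
            responseData.put("fieldName", fieldName);
        }

        public static void main(String[] args) {
            String serviceName = "hdfsdev"; // hypothetical repository name
            String errMsg = " You can still save the repository and start creating "
                    + "policies, but you would not be able to use autocomplete for "
                    + "resource names. Check xa_portal.log for more info.";
            Throwable ioe = new java.io.IOException("Login failure for hdfs@EXAMPLE.COM");
            String msgDesc = "Unable to login to Hadoop environment [" + serviceName + "]";
            DemoHadoopException hdp = new DemoHadoopException(msgDesc, ioe);
            // Before the fix only msgDesc reached the UI; now the root cause and
            // the guidance string travel together in the message field.
            hdp.generateResponseDataMap(false, ioe.getMessage() + errMsg, msgDesc + errMsg, null, null);
            System.out.println(hdp.responseData.get("message"));
        }
    }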

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
index 1ab2d4b..0f561d0 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
@@ -19,7 +19,11 @@
 
  package org.apache.ranger.plugin.client;
 
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
+
+import org.apache.commons.lang.StringUtils;
 
 public class HadoopException extends RuntimeException {
 
@@ -57,4 +61,25 @@ public class HadoopException extends RuntimeException {
                responseData.put("fieldName", fieldName);
        }
 
+       public String getMessage(Throwable excp) {
+               List<String> errList = new ArrayList<String>();
+               while (excp != null) {
+                       if (!errList.contains(excp.getMessage() + ". \n")) {
+                               if (excp.getMessage() != null && !(excp.getMessage().equalsIgnoreCase(""))) {
+                                       errList.add(excp.getMessage() + ". \n");
+                               }
+                       }
+                       excp = excp.getCause();
+               }
+               return StringUtils.join(errList, "");
+       }
+
+       public HashMap<String,Object> getResponseData() {
+               return responseData;
+       }
+
+       public void setReponseData(HashMap<String,Object> responseData) {
+               this.responseData = responseData;
+       }
+
 }
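
The getMessage(Throwable) helper added above is what lets every client report the full cause chain instead of only the outermost wrapper message. The same walk, reduced to a dependency-free sketch (StringUtils.join from commons-lang is replaced with a plain StringBuilder loop; the null/duplicate handling mirrors the committed code):

    import java.util.ArrayList;
    import java.util.List;

    public class CauseChainMessage {
        // Walk Throwable.getCause(), collecting each distinct non-empty message once.
        public static String getMessage(Throwable excp) {
            List<String> errList = new ArrayList<String>();
            while (excp != null) {
                String msg = excp.getMessage();
                if (msg != null && msg.length() > 0 && !errList.contains(msg + ". \n")) {
                    errList.add(msg + ". \n");
                }
                excp = excp.getCause();
            }
            StringBuilder sb = new StringBuilder();
            for (String s : errList) {
                sb.append(s);
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            Exception root = new java.io.IOException("Connection refused");
            Exception wrapped = new RuntimeException("Unable to login to Hadoop environment [hdfsdev]", root);
            System.out.println(getMessage(wrapped)); // prints both messages, outermost first
        }
    }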

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java
index 9832991..e5031af 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/RangerServiceHBase.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.model.RangerServiceDef;
 import org.apache.ranger.plugin.service.RangerBaseService;
@@ -56,7 +57,7 @@ public class RangerServiceHBase extends RangerBaseService {
                if ( configs != null) {
                        try  {
                                ret = HBaseResourceMgr.connectionTest(serviceName, configs);
-                       } catch (Exception e) {
+                       } catch (HadoopException e) {
                                LOG.error("<== RangerServiceHBase.validateConfig() Error:" + e);
                                throw e;
                        }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
index f28a598..1979905 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseClient.java
@@ -90,8 +90,8 @@ public class HBaseClient extends BaseClient {
                return connectionProp;
        }
        
-       public static HashMap<String, Object> connectionTest(String dataSource,
-                       Map<String, String> configs) {
+       public static HashMap<String, Object> connectionTest (String dataSource,
+                       Map<String, String> configs) throws Exception {
 
                HashMap<String, Object> responseData = new HashMap<String, 
Object>();
                final String errMsg = " You can still save the repository and 
start creating "
@@ -102,7 +102,12 @@ public class HBaseClient extends BaseClient {
                HBaseClient connectionObj = new HBaseClient(dataSource,
                                                                                configs);
                if (connectionObj != null) {
-                       connectivityStatus = connectionObj.getHBaseStatus();
+                       try {
+                               connectivityStatus = connectionObj.getHBaseStatus();
+                       } catch ( HadoopException e) {
+                               LOG.error("<== HBaseClient.testConnection(): Unable to retrieve any databases using given parameters", e);
+                               throw e;
+                       }
                }
                
                if (connectivityStatus) {
@@ -117,7 +122,7 @@ public class HBaseClient extends BaseClient {
                return responseData;
        }
        
-       public boolean getHBaseStatus() {
+       public boolean getHBaseStatus() throws HadoopException{
                boolean hbaseStatus = false;
                subj = getLoginSubject();
                final String errMsg = " You can still save the repository and start creating "
@@ -219,7 +224,11 @@ public class HBaseClient extends BaseClient {
                }               
        }
 
-       public List<String> getTableList(final String tableNameMatching, final List<String> existingTableList ) {
+       public List<String> getTableList(final String tableNameMatching, final List<String> existingTableList ) throws HadoopException {
+               if (LOG.isDebugEnabled()) {
+                       LOG.debug("==> HbaseClient.getTableList()  tableNameMatching " + tableNameMatching + " ExisitingTableList " +  existingTableList);
+               }
+
                List<String> ret = null ;
                final String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
@@ -310,11 +319,18 @@ public class HBaseClient extends BaseClient {
                                        
                                }) ;
                }
+               if (LOG.isDebugEnabled()) {
+                       LOG.debug("<== HbaseClient.getTableList() " + ret);
+               }
                return ret ;
        }
        
        
        public List<String> getColumnFamilyList(final String 
columnFamilyMatching, final List<String> tableList,final List<String> 
existingColumnFamilies) {
+               if (LOG.isDebugEnabled()) {
+                       LOG.debug("==> HbaseClient.getColumnFamilyList()  
columnFamilyMatching " + columnFamilyMatching + " ExisitingTableList " +  
tableList + "existingColumnFamilies " + existingColumnFamilies);
+               }
+
                List<String> ret = null ;
                final String errMsg = " You can still save the repository and 
start creating "
                                + "policies, but you would not be able to use 
autocomplete for "
@@ -431,6 +447,9 @@ public class HBaseClient extends BaseClient {
                                throw hdpException;
                        }
                }
+               if (LOG.isDebugEnabled()) {
+                       LOG.debug("<== HbaseClient.getColumnFamilyList() " + ret);
+               }
                return ret ;
        }
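
Every lookup entry point in HBaseClient now follows the same shape: an entry/exit trace guarded by LOG.isDebugEnabled(), and a catch that logs the throwable (so the stack trace reaches xa_portal.log) before rethrowing instead of swallowing it. Distilled into a sketch, with TableSource standing in for the real HBase admin call (assumes log4j 1.x on the classpath, as the Ranger agents do):

    import java.util.List;

    import org.apache.log4j.Logger;

    public class LookupPattern {
        private static final Logger LOG = Logger.getLogger(LookupPattern.class);

        // Hypothetical stand-in for the HBase admin lookup.
        interface TableSource {
            List<String> fetchTables(String matching) throws Exception;
        }

        public static List<String> getTableList(TableSource src, String tableNameMatching) throws Exception {
            if (LOG.isDebugEnabled()) {
                LOG.debug("==> getTableList() tableNameMatching " + tableNameMatching);
            }
            List<String> ret;
            try {
                ret = src.fetchTables(tableNameMatching);
            } catch (Exception e) {
                // log with the throwable so the stack trace is recorded, then propagate
                LOG.error("<== getTableList(): unable to retrieve tables", e);
                throw e;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("<== getTableList() " + ret);
            }
            return ret;
        }

        public static void main(String[] args) throws Exception {
            TableSource stub = new TableSource() {
                public List<String> fetchTables(String matching) {
                    return java.util.Arrays.asList("t1", "t2");
                }
            };
            System.out.println(getTableList(stub, "t.*"));
        }
    }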
 

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java
index 094385c..f32b8ee 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/services/hbase/client/HBaseResourceMgr.java
@@ -26,6 +26,7 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.service.ResourceLookupContext;
 import org.apache.ranger.plugin.util.TimedEventUtil;
 
@@ -45,7 +46,7 @@ public class HBaseResourceMgr {
                
                try {
                        ret = HBaseClient.connectionTest(serviceName, configs);
-               } catch (Exception e) {
+               } catch (HadoopException e) {
                        LOG.error("<== HBaseResourceMgr.connectionTest() Error: 
" + e) ;
                  throw e;
                }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
index 5f07740..bdf29f7 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.model.RangerServiceDef;
 import org.apache.ranger.plugin.service.RangerBaseService;
@@ -55,8 +56,8 @@ public class RangerServiceHdfs extends RangerBaseService {
                if ( configs != null) {
                        try  {
                                ret = HdfsResourceMgr.connectionTest(serviceName, configs);
-                       } catch (Exception e) {
-                               LOG.error("<== RangerServiceHdfs.validateConfig Error:" + e);
+                       } catch (HadoopException e) {
+                               LOG.error("<== RangerServiceHdfs.validateConfig Error: " + e.getMessage(),e);
                                throw e;
                        }
                }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
index 57b260d..128927d 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
@@ -23,6 +23,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.net.UnknownHostException;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.util.*;
 
 import javax.security.auth.Subject;
@@ -61,7 +62,7 @@ public class HdfsClient extends BaseClient {
 
        }
        
-       private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList) {
+       private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList) throws  HadoopException {
                List<String> fileList = new ArrayList<String>() ;
                String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
@@ -155,14 +156,13 @@ public class HdfsClient extends BaseClient {
        }
 
 
-       public List<String> listFiles(final String baseDir, final String fileMatching, final List<String> pathList) {
+       public List<String> listFiles(final String baseDir, final String fileMatching, final List<String> pathList) throws Exception {
 
-               PrivilegedAction<List<String>> action = new PrivilegedAction<List<String>>() {
+               PrivilegedExceptionAction<List<String>> action = new PrivilegedExceptionAction<List<String>>() {
                        @Override
-                       public List<String> run() {
+                       public List<String> run() throws Exception {
                                return listFilesInternal(baseDir, fileMatching, pathList) ;
                        }
-                       
                };
                return Subject.doAs(getLoginSubject(),action) ;
        }
@@ -179,7 +179,12 @@ public class HdfsClient extends BaseClient {
                String fileNameToMatch = (args.length == 2 ? null : args[2]) ;
                
                HdfsClient fs = new HdfsClient(repositoryName, null) ;
-               List<String> fsList = fs.listFiles(baseDir, fileNameToMatch,null) ;
+               List<String> fsList = null;
+               try {
+                       fsList = fs.listFiles(baseDir, fileNameToMatch,null);
+               } catch (Exception e) {
+                       e.printStackTrace();
+               }
                if (fsList != null && fsList.size() > 0) {
                        for(String s : fsList) {
                                System.out.println(s) ;
@@ -191,8 +196,9 @@ public class HdfsClient extends BaseClient {
        }
 
        public static HashMap<String, Object> connectionTest(String serviceName,
-                       Map<String, String> configs) {
+                       Map<String, String> configs) throws Exception {
 
+       LOG.info("===> HdfsClient.testConnection()" );
     HashMap<String, Object> responseData = new HashMap<String, Object>();
     boolean connectivityStatus = false;
 
@@ -204,29 +210,38 @@ public class HdfsClient extends BaseClient {
     }
 
     if (validateConfigsMsg == null) {
+
                  HdfsClient connectionObj = new HdfsClient(serviceName, configs);
                  if (connectionObj != null) {
-                       List<String> testResult = connectionObj.listFiles("/", null,null);
-                         if (testResult != null && testResult.size() != 0) {
+                       List<String> testResult = null;
+                       try {
+                                testResult = connectionObj.listFiles("/", null,null);
+                       } catch (HadoopException e) {
+                               LOG.error("<== HdfsClient.testConnection() error " + e.getMessage(),e );
+                                       throw e;
+                       }
+
+                       if (testResult != null && testResult.size() != 0) {
                                connectivityStatus = true;
-                         }
-                 }
+                       }
+               }
     }
-
+        String testconnMsg = null;
                if (connectivityStatus) {
-                       String successMsg = "ConnectionTest Successful";
-                       generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+                       testconnMsg = "ConnectionTest Successful";
+                       generateResponseDataMap(connectivityStatus, testconnMsg, testconnMsg,
                                        null, null, responseData);
                } else {
-                       String failureMsg = "Unable to retrieve any files using given parameters, "
-                                       + "You can still save the repository and start creating policies, "
-                                       + "but you would not be able to use autocomplete for resource names. "
-                                       + "Check xa_portal.log for more info. ";
+                       testconnMsg = "Unable to retrieve any files using given parameters, "
+                               + "You can still save the repository and start creating policies, "
+                               + "but you would not be able to use autocomplete for resource names. "
+                               + "Check xa_portal.log for more info. ";
       String additionalMsg = (validateConfigsMsg != null)  ?
-        validateConfigsMsg : failureMsg;
-                       generateResponseDataMap(connectivityStatus, failureMsg, additionalMsg,
+        validateConfigsMsg : testconnMsg;
+                       generateResponseDataMap(connectivityStatus, testconnMsg, additionalMsg,
                                        null, null, responseData);
                }
+               LOG.info("<== HdfsClient.testConnection(): Status " + testconnMsg );
                return responseData;
        }
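
The mechanical change that makes the HdfsClient propagation possible is the swap from PrivilegedAction to PrivilegedExceptionAction, whose run() may throw a checked exception through Subject.doAs. Note that this overload of Subject.doAs wraps checked failures in a PrivilegedActionException; the commit simply declares listFiles() as throws Exception, whereas the sketch below unwraps to the original cause. A minimal, self-contained version of that plumbing:

    import java.security.PrivilegedActionException;
    import java.security.PrivilegedExceptionAction;
    import java.util.Collections;
    import java.util.List;

    import javax.security.auth.Subject;

    public class PrivilegedLookup {
        // Hypothetical worker that may fail with a checked exception.
        static List<String> listFilesInternal(String baseDir) throws Exception {
            return Collections.singletonList(baseDir);
        }

        public static List<String> listFiles(Subject subject, final String baseDir) throws Exception {
            PrivilegedExceptionAction<List<String>> action =
                new PrivilegedExceptionAction<List<String>>() {
                    public List<String> run() throws Exception {
                        return listFilesInternal(baseDir);
                    }
                };
            try {
                return Subject.doAs(subject, action);
            } catch (PrivilegedActionException pae) {
                // unwrap so callers see the original checked exception
                throw pae.getException();
            }
        }

        public static void main(String[] args) throws Exception {
            System.out.println(listFiles(new Subject(), "/tmp"));
        }
    }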
 

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
index 998db9c..18cbc12 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
@@ -43,7 +43,7 @@ public class HdfsConnectionMgr {
        }
        
        
-       public HdfsClient getHadoopConnection(final String serviceName, final String serviceType, final Map<String,String> configs) {
+       public HdfsClient getHadoopConnection(final String serviceName, final String serviceType, final Map<String,String> configs) throws Exception{
                HdfsClient hdfsClient = null;
                if (serviceType != null) {
                        // get it from the cache
@@ -62,6 +62,7 @@ public class HdfsConnectionMgr {
                                                } catch(Exception e){
                                                        LOG.error("Error 
establishing connection for HDFS repository : "
                                                                        + 
serviceName, e);
+                                                       throw e;
                                                }
                                                
                                        } else {
@@ -78,6 +79,7 @@ public class HdfsConnectionMgr {
                                                } catch(Exception e){
                                                        LOG.error("Error 
establishing connection for HDFS repository : "
                                                                        + 
serviceName + " using configuration : " + configs, e);
+                                                       throw e;
                                                }
                                        }       
                                        HdfsClient oldClient = 
hdfsConnectionCache.putIfAbsent(serviceName, hdfsClient);
@@ -87,7 +89,14 @@ public class HdfsConnectionMgr {
                                        }
                                        repoConnectStatusMap.put(serviceName, true);
                                } else {
-                                       List<String> testConnect = hdfsClient.listFiles("/", "*",null);
+                                       List<String> testConnect = null;
+                                       try {
+                                               testConnect = hdfsClient.listFiles("/", "*",null);
+                                       } catch ( Exception e) {
+                                               LOG.error("Error establishing connection for HDFS repository : "
+                                                       + serviceName + " using configuration : " + configs, e);
+                                               throw e;
+                                       }
                                        if(testConnect == null){
                                                hdfsConnectionCache.put(serviceName, hdfsClient);
                                                hdfsClient = getHadoopConnection(serviceName,serviceType,configs);
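
HdfsConnectionMgr keeps its putIfAbsent-based connection cache, but each catch block that used to only log now rethrows, so a bad configuration surfaces on test-connection instead of yielding a silently null client. The cache-or-fail flow, condensed (Client and buildClient() are hypothetical stand-ins for HdfsClient and its constructor):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    public class ConnectionCache {
        static class Client {}

        private final ConcurrentMap<String, Client> cache = new ConcurrentHashMap<String, Client>();

        Client buildClient(String serviceName) throws Exception {
            return new Client(); // real code would open the HDFS connection here
        }

        public Client getConnection(String serviceName) throws Exception {
            Client client = cache.get(serviceName);
            if (client == null) {
                // construction failures now propagate to the caller
                client = buildClient(serviceName);
                Client old = cache.putIfAbsent(serviceName, client);
                if (old != null) {
                    client = old; // another thread won the race; reuse its client
                }
            }
            return client;
        }

        public static void main(String[] args) throws Exception {
            ConnectionCache mgr = new ConnectionCache();
            System.out.println(mgr.getConnection("hdfsdev") == mgr.getConnection("hdfsdev")); // true: cached
        }
    }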

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
index 5bce2cd..3afc1a9 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
@@ -27,6 +27,7 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.service.ResourceLookupContext;
 import org.apache.ranger.plugin.util.TimedEventUtil;
 
@@ -44,9 +45,9 @@ public class HdfsResourceMgr {
                
                try {
                        ret = HdfsClient.connectionTest(serviceName, configs);
-               } catch (Exception e) {
-                       LOG.error("<== HdfsResourceMgr.connectionTest Error: " + e) ;
-                 throw e;
+               } catch (HadoopException e) {
+                       LOG.error("<== HdfsResourceMgr.testConnection Error: " + e.getMessage(),  e) ;
+                       throw e;
                }
                
                if(LOG.isDebugEnabled()) {
@@ -116,7 +117,7 @@ public class HdfsResourceMgr {
                                                        + "\n Matching 
resources : " + resultList);
                                        }
                                }
-                       } catch (Exception e) {
+                       } catch (HadoopException e) {
                                LOG.error("Unable to get hdfs resources.", e);
                                throw e;
                        }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java
index fa006ef..32d212c 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/RangerServiceHive.java
@@ -23,6 +23,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.model.RangerServiceDef;
 import org.apache.ranger.plugin.service.RangerBaseService;
@@ -54,7 +55,7 @@ public class RangerServiceHive extends RangerBaseService {
                if ( configs != null) {
                        try  {
                                ret = HiveResourceMgr.connectionTest(serviceName, configs);
-                       } catch (Exception e) {
+                       } catch (HadoopException e) {
                                LOG.error("<== RangerServiceHive.validateConfig Error:" + e);
                                throw e;
                        }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
index 84f4d9b..fe05ddd 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveClient.java
@@ -21,6 +21,7 @@
 
 import java.io.Closeable;
 import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
 import java.sql.Connection;
 import java.sql.Driver;
 import java.sql.DriverManager;
@@ -48,54 +49,62 @@ public class HiveClient extends BaseClient implements Closeable {
        
        Connection con = null ;
        boolean isKerberosAuth=false;
-       
 
-       public HiveClient(String serviceName) {
+       public HiveClient(String serviceName) throws Exception {
                super(serviceName, null) ;
-               initHive() ;
+                       initHive() ;
        }
-       
-       public HiveClient(String serviceName,Map<String,String> connectionProp) {
+
+       public HiveClient(String serviceName,Map<String,String> connectionProp) throws Exception{
                super(serviceName,connectionProp) ;
-               initHive() ;
+                       initHive() ;
        }
-       
-       public void initHive() {
+
+       public void initHive() throws Exception {
                isKerberosAuth = getConfigHolder().isKerberosAuthentication();
                if (isKerberosAuth) {
                        LOG.info("Secured Mode: JDBC Connection done with preAuthenticated Subject");
-                       Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
-                               public Object run() {
+                               Subject.doAs(getLoginSubject(), new PrivilegedExceptionAction<Void>(){
+                               public Void run() throws Exception {
                                        initConnection();
                                        return null;
-                               }
-                       }) ;                            
+                               }});
                }
                else {
                        LOG.info("Since Password is NOT provided, Trying to use UnSecure client with username and password");
                        final String userName = getConfigHolder().getUserName() ;
                        final String password = getConfigHolder().getPassword() ;
-                       Subject.doAs(getLoginSubject(), new PrivilegedAction<Object>() {
-                               public Object run() {
+                           Subject.doAs(getLoginSubject(), new PrivilegedExceptionAction<Void>() {
+                               public Void run() throws Exception {
                                        initConnection(userName,password);
                                        return null;
-                               }
-                       }) ;    
+                               }}) ;
                }
        }
        
-       public List<String> getDatabaseList(String databaseMatching, final List<String> databaseList){
+       public List<String> getDatabaseList(String databaseMatching, final List<String> databaseList) throws HadoopException{
                final String       dbMatching = databaseMatching;
                final List<String> dbList         = databaseList;
                List<String> dblist = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
                        public List<String>  run() {
-                               return getDBList(dbMatching,dbList);
+                               List<String> ret = null;
+                               try {
+                                       ret = getDBList(dbMatching,dbList);
+                               } catch ( HadoopException he) {
+                                       LOG.error("<== HiveClient getDatabaseList() :Unable to get the Database List", he);
+                                       throw he;
+                               }
+                               return ret;
                        }
                }) ;
                return dblist;
        }
                
-       private List<String> getDBList(String databaseMatching, List<String>dbList) {
+       private List<String> getDBList(String databaseMatching, List<String>dbList) throws  HadoopException {
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("==> HiveClient getDBList databaseMatching : " + databaseMatching + " ExcludedbList :" + dbList) ;
+               }
+
                List<String> ret = new ArrayList<String>() ;
                String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
@@ -108,9 +117,6 @@ public class HiveClient extends BaseClient implements Closeable {
                                sql = sql + " like \"" + databaseMatching  + "\"" ;
                        }
                        try {
-                               if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient getDBList databaseMatching : " + databaseMatching + " ExcludedbList :" + dbList) ;
-                               }
                                stat =  con.createStatement()  ;
                                rs = stat.executeQuery(sql) ;
                                while (rs.next()) {
@@ -128,7 +134,7 @@ public class HiveClient extends BaseClient implements Closeable {
                                hdpException.generateResponseDataMap(false, getMessage(sqlt),
                                                msgDesc + errMsg, null, null);
                                if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient.getDBList() Error : " + sqlt) ;
+                                       LOG.debug("<== HiveClient.getDBList() Error : ",  sqlt) ;
                                }
                                throw hdpException;
                        } catch (SQLException sqle) {
@@ -138,7 +144,7 @@ public class HiveClient extends BaseClient implements Closeable {
                                hdpException.generateResponseDataMap(false, getMessage(sqle),
                                                msgDesc + errMsg, null, null);
                                if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient.getDBList() Error : " + sqle) ;
+                                       LOG.debug("<== HiveClient.getDBList() Error : " , sqle) ;
                                }
                                throw hdpException;
                        } finally {
@@ -147,23 +153,40 @@ public class HiveClient extends BaseClient implements Closeable {
                        }
                        
                }
+
+               if(LOG.isDebugEnabled()) {
+                         LOG.debug("<== HiveClient.getDBList(): " + ret);
+               }
+
                return ret ;
        }
        
-       public List<String> getTableList(String tableNameMatching, List<String> databaseList, List<String> tblNameList){
+       public List<String> getTableList(String tableNameMatching, List<String> databaseList, List<String> tblNameList) throws HadoopException  {
                final String       tblNameMatching = tableNameMatching;
                final List<String> dbList                  = databaseList;
                final List<String> tblList         = tblNameList;
-               
+
                List<String> tableList = Subject.doAs(getLoginSubject(), new PrivilegedAction<List<String>>() {
                        public List<String>  run() {
-                               return getTblList(tblNameMatching,dbList,tblList);
+                                List<String> ret = null;
+                               try {
+                                       ret = getTblList(tblNameMatching,dbList,tblList);
+                               } catch(HadoopException he) {
+                                       LOG.error("<== HiveClient getTblList() :Unable to get the Table List", he);
+                                       throw he;
+                               }
+                               return ret;
                        }
                }) ;
+
                return tableList;
        }
 
-       public List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) {
+       public List<String> getTblList(String tableNameMatching, List<String> dbList, List<String> tblList) throws HadoopException {
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("==> HiveClient getTableList() tableNameMatching : " + tableNameMatching + " ExcludedbList :" + dbList + "ExcludeTableList :" + tblList) ;
+               }
+
                List<String> ret = new ArrayList<String>() ;
                String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
@@ -171,14 +194,10 @@ public class HiveClient extends BaseClient implements Closeable {
                if (con != null) {
                        Statement stat =  null ;
                        ResultSet rs = null ;
-                       
+
                        String sql = null ;
-                       
+
                        try {
-                               
-                               if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient getTblList tableNameMatching: " + tableNameMatching + " dbList :" + dbList + " tblList: " + tblList) ;
-                               }
                                if (dbList != null && !dbList.isEmpty()) {
                                        for ( String db: dbList) {
                                                sql = "use " + db;
@@ -222,7 +241,7 @@ public class HiveClient extends BaseClient implements Closeable {
                                hdpException.generateResponseDataMap(false, getMessage(sqlt),
                                                msgDesc + errMsg, null, null);
                                if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient.getTblList() Error : " + sqlt) ;
+                                       LOG.debug("<== HiveClient.getTblList() Error : " , sqlt) ;
                                }
                                throw hdpException;
                        } catch (SQLException sqle) {
@@ -232,12 +251,17 @@ public class HiveClient extends BaseClient implements Closeable {
                                hdpException.generateResponseDataMap(false, getMessage(sqle),
                                                msgDesc + errMsg, null, null);
                                if(LOG.isDebugEnabled()) {
-                                       LOG.debug("<== HiveClient.getTblList() Error : " + sqle) ;
+                                       LOG.debug("<== HiveClient.getTblList() Error : " , sqle) ;
                                }
                                throw hdpException;
                        }
                        
                }
+
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== HiveClient getTableList() " +  ret) ;
+               }
+
                return ret ;
        }
 
@@ -251,20 +275,31 @@ public class HiveClient extends BaseClient implements Closeable {
                return ret ;
        }
        
-       public List<String> getColumnList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) {
+       public List<String> getColumnList(String columnNameMatching, List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
                final String clmNameMatching    = columnNameMatching;
                final List<String> databaseList = dbList;
                final List<String> tableList    = tblList;
                final List<String> clmList      = colList;
                List<String> columnList = Subject.doAs(getLoginSubject(), new 
PrivilegedAction<List<String>>() {
                        public List<String>  run() {
-                                       return 
getClmList(clmNameMatching,databaseList,tableList,clmList);
+                                   List<String> ret = null;
+                                       try {
+                                               ret = 
getClmList(clmNameMatching,databaseList,tableList,clmList);
+                                       } catch ( HadoopException he) {
+                                               LOG.error("<== HiveClient 
getColumnList() :Unable to get the Column List", he);
+                                               throw he;
+                                       }
+                                       return ret;
                                }
                        }) ;
                return columnList;
        }
        
-       public List<String> getClmList(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) {
+       public List<String> getClmList(String columnNameMatching,List<String> dbList, List<String> tblList, List<String> colList) throws HadoopException {
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList +  " tblList: " + tblList + " colList: " + colList) ;
+               }
+
                List<String> ret = new ArrayList<String>() ;
                String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
@@ -281,11 +316,7 @@ public class HiveClient extends BaseClient implements Closeable {
                        ResultSet rs = null ;
                        
                        String sql = null ;
-                       
-                       if(LOG.isDebugEnabled()) {
-                               LOG.debug("<== HiveClient.getClmList() columnNameMatching: " + columnNameMatching + " dbList :" + dbList +  " tblList: " + tblList + " colList: " + colList) ;
-                       }
-                       
+
                        if (dbList != null && !dbList.isEmpty() && 
                                tblList != null && !tblList.isEmpty()) {
                                for (String db: dbList) {
@@ -325,7 +356,7 @@ public class HiveClient extends BaseClient implements Closeable {
                                                                hdpException.generateResponseDataMap(false, getMessage(sqlt),
                                                                                msgDesc + errMsg, null, null);
                                                                if(LOG.isDebugEnabled()) {
-                                                                       LOG.debug("<== HiveClient.getClmList() Error : " + sqlt) ;
+                                                                       LOG.debug("<== HiveClient.getClmList() Error : " ,sqlt) ;
                                                                }
                                                                throw hdpException;
                                                        } catch (SQLException sqle) {
@@ -335,7 +366,7 @@ public class HiveClient extends BaseClient implements Closeable {
                                                                hdpException.generateResponseDataMap(false, getMessage(sqle),
                                                                                msgDesc + errMsg, null, null);
                                                                if(LOG.isDebugEnabled()) {
-                                                                       LOG.debug("<== HiveClient.getClmList() Error : " + sqle) ;
+                                                                       LOG.debug("<== HiveClient.getClmList() Error : " ,sqle) ;
                                                                }
                                                                throw hdpException;
                                                        } finally {
@@ -346,6 +377,11 @@ public class HiveClient extends BaseClient implements Closeable {
                                }
                        }
                }
+
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== HiveClient.getClmList() " + ret ) ;
+               }
+
                return ret ;
        }
        
@@ -389,12 +425,17 @@ public class HiveClient extends BaseClient implements Closeable {
                }
        }
 
-       private void initConnection() {
-               initConnection(null,null) ;
+       private void initConnection() throws HadoopException{
+           try {
+          initConnection(null,null) ;
+           } catch (HadoopException he) {
+          LOG.error("Unable to Connect to Hive", he);
+          throw he;
+           }
        }
 
        
-       private void initConnection(String userName, String password) {
+       private void initConnection(String userName, String password) throws HadoopException  {
        
                Properties prop = getConfigHolder().getRangerSection() ;
                String driverClassName = prop.getProperty("jdbc.driverClassName") ;
@@ -413,6 +454,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, e);
                                hdpException.generateResponseDataMap(false, getMessage(e),
                                                msgDesc + errMsg, null, null);
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                        } catch (IllegalAccessException ilae) {
                                String msgDesc = "initConnection: Class or its nullary constructor might not accessible."
@@ -420,6 +464,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, ilae);
                                hdpException.generateResponseDataMap(false, getMessage(ilae),
                                                msgDesc + errMsg, null, null);
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                        } catch (InstantiationException ie) {
                                String msgDesc = "initConnection: Class may not have its nullary constructor or "
@@ -428,6 +475,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, ie);
                                hdpException.generateResponseDataMap(false, getMessage(ie),
                                                msgDesc + errMsg, null, null);
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                                
                        } catch (ExceptionInInitializerError eie) {
@@ -437,6 +487,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, eie);
                                hdpException.generateResponseDataMap(false, getMessage(eie),
                                                msgDesc + errMsg, null, null);
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                        } catch (SecurityException se) {
                                String msgDesc = "initConnection: unable to initiate connection to hive thrift server instance,"
@@ -446,6 +499,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, se);
                                hdpException.generateResponseDataMap(false, getMessage(se),
                                                msgDesc + errMsg, null, null);
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                        } catch (Throwable t) {
                                String msgDesc = "initConnection: Unable to connect to Hive Thrift Server instance, "
@@ -453,6 +509,9 @@ public class HiveClient extends BaseClient implements Closeable {
                                HadoopException hdpException = new HadoopException(msgDesc, t);
                                hdpException.generateResponseDataMap(false, getMessage(t),
                                                msgDesc + errMsg, null, "jdbc.driverClassName");
+                               if ( LOG.isDebugEnabled()) {
+                                       LOG.debug(msgDesc, hdpException);
+                               }
                                throw hdpException;
                        }
                }
@@ -471,19 +530,28 @@ public class HiveClient extends BaseClient implements Closeable {
                        HadoopException hdpException = new HadoopException(msgDesc, e);
                        hdpException.generateResponseDataMap(false, getMessage(e), msgDesc
                                        + errMsg, null, null);
+                       if ( LOG.isDebugEnabled()) {
+                               LOG.debug(msgDesc, hdpException);
+                       }
                        throw hdpException;
                } catch (SecurityException se) {
                        String msgDesc = "Unable to connect to Hive Thrift Server instance.";
                        HadoopException hdpException = new HadoopException(msgDesc, se);
                        hdpException.generateResponseDataMap(false, getMessage(se), msgDesc
                                        + errMsg, null, null);
+                       if ( LOG.isDebugEnabled()) {
+                               LOG.debug(msgDesc, hdpException);
+                       }
                        throw hdpException;
                } catch ( Throwable t) {
                        String msgDesc = "Unable to connect to Hive Thrift Server instance";
                        HadoopException hdpException = new HadoopException(msgDesc, t);
                        hdpException.generateResponseDataMap(false, getMessage(t),
                                        msgDesc + errMsg, null, url);
-                    throw hdpException;
+                       if ( LOG.isDebugEnabled()) {
+                               LOG.debug(msgDesc, hdpException);
+                       }
+               throw hdpException;
                }
        }
 
@@ -501,8 +569,13 @@ public class HiveClient extends BaseClient implements Closeable {
                        hc = new HiveClient(args[0]) ;
                        
                        if (args.length == 2) {
-                               List<String> dbList = hc.getDatabaseList(args[1],null) ;
-                               if (dbList.size() == 0) {
+                               List<String> dbList = null;
+                               try {
+                                       dbList = hc.getDatabaseList(args[1],null);
+                               } catch (Exception e) {
+                                       e.printStackTrace();
+                               }
+                               if (dbList != null && dbList.size() == 0) {
                                        System.out.println("No database found with db filter [" + args[1] + "]") ;
                                }
                                else {
@@ -534,6 +607,8 @@ public class HiveClient extends BaseClient implements Closeable {
                                }
                        }
                        
+               } catch(Exception e) {
+                       e.printStackTrace();
                }
                finally {
                        if (hc != null) {
@@ -543,35 +618,38 @@ public class HiveClient extends BaseClient implements 
Closeable {
        }
 
        public static HashMap<String, Object> connectionTest(String serviceName,
-                       Map<String, String> connectionProperties) {
-
+                       Map<String, String> connectionProperties) throws 
Exception {
+               HiveClient connectionObj = null;
                HashMap<String, Object> responseData = new HashMap<String, 
Object>();
                boolean connectivityStatus = false;
                String errMsg = " You can still save the repository and start 
creating "
                                + "policies, but you would not be able to use 
autocomplete for "
                                + "resource names. Check xa_portal.log for more 
info.";
-
-               HiveClient connectionObj = new HiveClient(serviceName,
-                               (HashMap<String, String>) connectionProperties);
-               if (connectionObj != null) {
-               
-                       List<String> testResult = 
connectionObj.getDatabaseList("*",null);
-                       if (testResult != null && testResult.size() != 0) {
-                               connectivityStatus = true;
+               List<String> testResult = null;
+               try {
+                       connectionObj = new HiveClient(serviceName,     
connectionProperties);
+                       if (connectionObj != null) {
+                               testResult = 
connectionObj.getDatabaseList("*",null);
+                               if (testResult != null && testResult.size() != 
0) {
+                                       connectivityStatus = true;
+                               }
+                               if (connectivityStatus) {
+                                       String successMsg = "ConnectionTest 
Successful";
+                                       
generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+                                               null, null, responseData);
+                               } else {
+                                       String failureMsg = "Unable to retrieve 
any databases using given parameters.";
+                                       
generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
+                                               null, null, responseData);
+                               }
+                       }
+               } catch ( Exception e) {
+                       throw e;
+               } finally  {
+                       if ( connectionObj != null) {
+                               connectionObj.close();
                        }
                }
-               if (connectivityStatus) {
-                       String successMsg = "ConnectionTest Successful";
-                       generateResponseDataMap(connectivityStatus, successMsg, successMsg,
-                                       null, null, responseData);
-               } else {
-                       String failureMsg = "Unable to retrieve any databases using given parameters.";
-                       generateResponseDataMap(connectivityStatus, failureMsg, failureMsg + errMsg,
-                                       null, null, responseData);
-               }
-               
-               connectionObj.close();
                return responseData;
        }
-       
 }
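
Note on the reworked connectionTest above: the HiveClient is now created inside
the try block, always closed in the finally block, and failures propagate to the
caller instead of being swallowed into a generic failure message. A minimal
sketch of the resulting call pattern follows; the service name and JDBC property
values are hypothetical, and the "connectivityStatus" key is the one populated
by BaseClient.generateResponseDataMap:

import java.util.HashMap;
import java.util.Map;

import org.apache.ranger.services.hive.client.HiveClient;

public class HiveConnectionTestExample {
	public static void main(String[] args) {
		// Illustrative connection properties; real values come from the service config.
		Map<String, String> configs = new HashMap<String, String>();
		configs.put("jdbc.driverClassName", "org.apache.hive.jdbc.HiveDriver");
		configs.put("jdbc.url", "jdbc:hive2://localhost:10000/default");
		try {
			// connectionTest is now declared "throws Exception", so the real
			// cause reaches the caller (and ultimately the Ranger admin UI).
			HashMap<String, Object> resp = HiveClient.connectionTest("hivedev", configs);
			System.out.println("connectivityStatus: " + resp.get("connectivityStatus"));
		} catch (Exception e) {
			// The client was already closed in connectionTest's finally block.
			System.err.println("Connection test failed: " + e.getMessage());
		}
	}
}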

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveResourceMgr.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveResourceMgr.java b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveResourceMgr.java
index b51a0c1..69f31d8 100644
--- a/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveResourceMgr.java
+++ b/hive-agent/src/main/java/org/apache/ranger/services/hive/client/HiveResourceMgr.java
@@ -25,6 +25,7 @@ import java.util.concurrent.Callable;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.client.HadoopException;
 import org.apache.ranger.plugin.service.ResourceLookupContext;
 import org.apache.ranger.plugin.util.TimedEventUtil;
 
@@ -47,7 +48,7 @@ public class HiveResourceMgr {
                
                try {
                        ret = HiveClient.connectionTest(serviceName, configs);
-               } catch (Exception e) {
+               } catch (HadoopException e) {
                        LOG.error("<== HiveResourceMgr.connectionTest Error: " + e) ;
                  throw e;
                }
@@ -181,6 +182,7 @@ public class HiveResourceMgr {
                                 }
                          } catch (Exception e) {
                                LOG.error("Unable to get hive resources.", e);
+                               throw e;
                        }
                }
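
The narrowed catch above matters because HadoopException carries the structured
response-data map that the admin UI displays; rethrowing (both here and in the
resource-lookup path) lets that payload reach ServiceMgr instead of being logged
and dropped. A minimal sketch of the pattern, where HypotheticalClient is an
illustrative stand-in for the real lookup helpers:

import java.util.List;

import org.apache.log4j.Logger;
import org.apache.ranger.plugin.client.HadoopException;

public class LookupErrorPropagationExample {
	private static final Logger LOG = Logger.getLogger(LookupErrorPropagationExample.class);

	interface HypotheticalClient {
		List<String> listResources() throws HadoopException;
	}

	static List<String> lookup(HypotheticalClient client) throws HadoopException {
		try {
			return client.listResources();
		} catch (HadoopException he) {
			// Log locally, then rethrow so the REST layer can read
			// he.getResponseData() and surface the root cause, rather
			// than returning an empty list that hides the failure.
			LOG.error("Unable to get hive resources.", he);
			throw he;
		}
	}
}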
 

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java
----------------------------------------------------------------------
diff --git a/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java
index 094226d..8af77b2 100644
--- a/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java
+++ b/knox-agent/src/main/java/org/apache/ranger/services/knox/client/KnoxClient.java
@@ -278,7 +278,6 @@ public class KnoxClient {
        public static HashMap<String, Object> connectionTest(String serviceName,
                                                                                Map<String, String> configs) {
 
-               List<String> strList = new ArrayList<String>();
                String errMsg = " You can still save the repository and start creating "
                                + "policies, but you would not be able to use autocomplete for "
                                + "resource names. Check xa_portal.log for more info.";
@@ -286,7 +285,7 @@ public class KnoxClient {
                HashMap<String, Object> responseData = new HashMap<String, Object>();
 
                KnoxClient knoxClient = getKnoxClient(serviceName, configs);
-               strList = getKnoxResources(knoxClient, "", null,null,null);
+               List<String> strList = getKnoxResources(knoxClient, "", null,null,null);
 
                if (strList != null && (strList.size() != 0)) {
                        connectivityStatus = true;
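
The KnoxClient change above (repeated for YarnClient and StormClient below) is a
small scoping cleanup: strList was pre-allocated as an ArrayList and then
immediately overwritten by the lookup call. A minimal illustration, with
fetchNames standing in for the real getKnoxResources/getYarnResource/
getStormResources helpers:

import java.util.Arrays;
import java.util.List;

public class DeclareAtUseExample {
	static List<String> fetchNames() {
		return Arrays.asList("service1", "service2");
	}

	public static void main(String[] args) {
		// Before: List<String> strList = new ArrayList<String>(); strList = fetchNames();
		// After: declare at the single assignment, avoiding the dead allocation.
		List<String> strList = fetchNames();
		System.out.println(strList);
	}
}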

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/plugin-yarn/src/main/java/org/apache/ranger/services/yarn/client/YarnClient.java
----------------------------------------------------------------------
diff --git a/plugin-yarn/src/main/java/org/apache/ranger/services/yarn/client/YarnClient.java b/plugin-yarn/src/main/java/org/apache/ranger/services/yarn/client/YarnClient.java
index eb47e0c..0c7bcad 100644
--- a/plugin-yarn/src/main/java/org/apache/ranger/services/yarn/client/YarnClient.java
+++ b/plugin-yarn/src/main/java/org/apache/ranger/services/yarn/client/YarnClient.java
@@ -225,14 +225,13 @@ public class YarnClient extends BaseClient {
        public static HashMap<String, Object> connectionTest(String serviceName,
                        Map<String, String> configs) {
 
-               List<String> strList = new ArrayList<String>();
                String errMsg = errMessage;
                boolean connectivityStatus = false;
                HashMap<String, Object> responseData = new HashMap<String, Object>();
 
                YarnClient yarnClient = getYarnClient(serviceName,
                                configs);
-               strList = getYarnResource(yarnClient, "",null);
+               List<String> strList = getYarnResource(yarnClient, "",null);
 
                if (strList != null && strList.size() > 0 ) {
                        if (LOG.isDebugEnabled()) {

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java b/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
index 7950439..16b00cd 100644
--- a/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
+++ b/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
@@ -123,7 +123,7 @@ public class ServiceMgr {
                                                
                                HashMap<String, Object> respData = new HashMap<String, Object>();
                                if (e instanceof HadoopException) {
-                                       respData = ((HadoopException) e).responseData;
+                                       respData = ((HadoopException) e).getResponseData();
                                }
                                ret = generateResponseForTestConn(respData, msg);
                                LOG.error("==> ServiceMgr.validateConfig Error:" + e);

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/security-admin/src/main/java/org/apache/ranger/common/TimedExecutor.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/org/apache/ranger/common/TimedExecutor.java b/security-admin/src/main/java/org/apache/ranger/common/TimedExecutor.java
index 643d882..ca2e2dc 100644
--- a/security-admin/src/main/java/org/apache/ranger/common/TimedExecutor.java
+++ b/security-admin/src/main/java/org/apache/ranger/common/TimedExecutor.java
@@ -36,6 +36,7 @@ import java.util.concurrent.TimeoutException;
 import javax.annotation.PostConstruct;
 
 import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.client.HadoopException;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.stereotype.Service;
@@ -91,14 +92,14 @@ public class TimedExecutor {
                                if (LOG.isDebugEnabled()) {
                                        LOG.debug(String.format("TimedExecutor: Caught exception[%s] for callable[%s]: detail[%s].  Re-throwing...", e.getClass().getName(), callable, e.getMessage()));
                                }
-                               throw e;
+                               HadoopException he = generateHadoopException(e);
+                               throw he;
                        } catch (TimeoutException e) {
                                if (LOG.isDebugEnabled()) {
                                        LOG.debug(String.format("TimedExecutor: Timed out waiting for callable[%s] to finish.  Cancelling the task.", callable));
                                }
                                boolean interruptRunningTask = true;
                                future.cancel(interruptRunningTask);
-                               LOG.debug("TimedExecutor: Re-throwing timeout exception to caller");
                                throw e;
                        }
                } catch (RejectedExecutionException e) {
@@ -116,6 +117,16 @@ public class TimedExecutor {
                _executorService.shutdownNow();
        }
        
+       private HadoopException generateHadoopException( Exception e) {
+               String msgDesc = "Unable to retrieve any files using given parameters, "
+                               + "You can still save the repository and start creating policies, "
+                               + "but you would not be able to use autocomplete for resource names. "
+                               + "Check xa_portal.log for more info. ";
+               HadoopException hpe = new HadoopException(e.getMessage(), e);
+               hpe.generateResponseDataMap(false, hpe.getMessage(e), msgDesc, null, null);
+               return hpe;
+       }
+
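
With generateHadoopException above, a callable that fails inside TimedExecutor
now surfaces as a HadoopException whose response-data map carries the UI-facing
description, rather than as a bare ExecutionException. A minimal sketch of the
catch-and-convert pattern from the consumer's side; runWithConversion is an
illustrative stand-in for timedTask plus the new helper:

import java.util.concurrent.Callable;

import org.apache.ranger.plugin.client.HadoopException;

public class TimedExecutorCallerExample {
	public static void main(String[] args) {
		Callable<String> failing = new Callable<String>() {
			public String call() throws Exception {
				throw new IllegalStateException("simulated connection failure");
			}
		};
		try {
			runWithConversion(failing);
		} catch (HadoopException he) {
			// This is the map that ServiceMgr.validateConfig forwards to the UI.
			System.err.println(he.getResponseData());
		}
	}

	// Stand-in for TimedExecutor.timedTask(...) + generateHadoopException(...).
	static <T> T runWithConversion(Callable<T> task) throws HadoopException {
		try {
			return task.call();
		} catch (Exception e) {
			HadoopException he = new HadoopException(e.getMessage(), e);
			he.generateResponseDataMap(false, e.getMessage(),
					"Check xa_portal.log for more info.", null, null);
			throw he;
		}
	}
}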
        static class LocalUncaughtExceptionHandler implements UncaughtExceptionHandler {
 
                @Override

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/3a21f744/storm-agent/src/main/java/org/apache/ranger/services/storm/client/StormClient.java
----------------------------------------------------------------------
diff --git a/storm-agent/src/main/java/org/apache/ranger/services/storm/client/StormClient.java b/storm-agent/src/main/java/org/apache/ranger/services/storm/client/StormClient.java
index 84c2ebf..74170fe 100644
--- a/storm-agent/src/main/java/org/apache/ranger/services/storm/client/StormClient.java
+++ b/storm-agent/src/main/java/org/apache/ranger/services/storm/client/StormClient.java
@@ -300,14 +300,13 @@ public class StormClient {
        public static HashMap<String, Object> connectionTest(String serviceName,
                        Map<String, String> configs) {
 
-               List<String> strList = new ArrayList<String>();
                String errMsg = errMessage;
                boolean connectivityStatus = false;
                HashMap<String, Object> responseData = new HashMap<String, Object>();
 
                StormClient stormClient = getStormClient(serviceName,
                                configs);
-               strList = getStormResources(stormClient, "",null);
+               List<String> strList = getStormResources(stormClient, "",null);
 
                if (strList != null) {
                        connectivityStatus = true;
