Repository: incubator-ranger
Updated Branches:
  refs/heads/stack 217e18924 -> 447658578


RANGER-225: LookupResource and ValidateConfig implementation for HDFS plugin and REST API


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/44765857
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/44765857
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/44765857

Branch: refs/heads/stack
Commit: 447658578decf33d2b68d872a32db59227dfef1b
Parents: 217e189
Author: rmani <rm...@hortonworks.com>
Authored: Sun Feb 1 12:41:35 2015 -0800
Committer: rmani <rm...@hortonworks.com>
Committed: Sun Feb 1 12:41:35 2015 -0800

----------------------------------------------------------------------
 .../security/KrbPasswordSaverLoginModule.java   |  77 ++++
 .../hadoop/security/SecureClientLogin.java      | 134 +++++++
 .../apache/ranger/plugin/client/BaseClient.java | 162 +++++++++
 .../ranger/plugin/client/HadoopClassLoader.java | 104 ++++++
 .../plugin/client/HadoopConfigHolder.java       | 360 +++++++++++++++++++
 .../ranger/plugin/client/HadoopException.java   |  60 ++++
 .../plugin/service/RangerBaseService.java       |   3 +-
 .../plugin/service/ResourceLookupContext.java   |  13 +
 .../ranger/plugin/util/TimedEventUtil.java      |  71 ++++
 .../main/resources/resourcenamemap.properties   |  33 ++
 .../ranger/services/hdfs/RangerServiceHdfs.java | 110 ++++++
 .../ranger/services/hdfs/client/HdfsClient.java | 219 +++++++++++
 .../services/hdfs/client/HdfsConnectionMgr.java | 111 ++++++
 .../services/hdfs/client/HdfsResourceMgr.java   | 127 +++++++
 .../ranger/services/hdfs/HdfsClientTester.java  |  63 ++++
 .../services/hdfs/TestRangerServiceHdfs.java    | 164 +++++++++
 .../java/org/apache/ranger/biz/ServiceMgr.java  | 152 ++++++++
 .../org/apache/ranger/rest/ServiceREST.java     |  36 +-
 18 files changed, 1995 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java b/agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
new file mode 100644
index 0000000..6dbbb13
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/hadoop/security/KrbPasswordSaverLoginModule.java
@@ -0,0 +1,77 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.hadoop.security;
+
+import java.util.Map;
+
+import javax.security.auth.Subject;
+import javax.security.auth.callback.CallbackHandler;
+import javax.security.auth.login.LoginException;
+import javax.security.auth.spi.LoginModule;
+
+public class KrbPasswordSaverLoginModule implements LoginModule {
+       
+    public static final String USERNAME_PARAM = "javax.security.auth.login.name";
+    public static final String PASSWORD_PARAM = "javax.security.auth.login.password";
+
+       @SuppressWarnings("rawtypes")
+       private Map sharedState = null ;
+       
+       public KrbPasswordSaverLoginModule() {
+       }
+
+       @Override
+       public boolean abort() throws LoginException {
+               return true;
+       }
+
+       @Override
+       public boolean commit() throws LoginException {
+               return true;
+       }
+
+       @SuppressWarnings("unchecked")
+       @Override
+       public void initialize(Subject subject, CallbackHandler callbackhandler, Map<String, ?> sharedMap, Map<String, ?> options) {
+               
+               this.sharedState = sharedMap ;
+               
+               String userName = (options != null) ? (String)options.get(USERNAME_PARAM) : null ;
+               if (userName != null) {
+                       this.sharedState.put(USERNAME_PARAM,userName) ;
+               }
+               String password = (options != null) ? (String)options.get(PASSWORD_PARAM) : null ;
+               
+               if (password != null) {
+                       this.sharedState.put(PASSWORD_PARAM,password.toCharArray()) ;
+               }
+               }
+       }
+
+       @Override
+       public boolean login() throws LoginException {
+               return true;
+       }
+
+       @Override
+       public boolean logout() throws LoginException {
+               return true;
+       }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java b/agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
new file mode 100644
index 0000000..ba0c443
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/hadoop/security/SecureClientLogin.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.security;
+
+import java.io.IOException;
+import java.security.Principal;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.security.auth.Subject;
+import javax.security.auth.login.AppConfigurationEntry;
+import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
+import javax.security.auth.login.LoginContext;
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+
+
+public class SecureClientLogin {
+
+       public synchronized static Subject loginUserFromKeytab(String user, String path) throws IOException {
+               try {
+                       Subject subject = new Subject();
+                       SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(true, user, path);
+                       LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf);
+                       subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login));
+                       login.login();
+                       return login.getSubject();
+               } catch (LoginException le) {
+                       throw new IOException("Login failure for " + user + " from keytab " + path, le);
+               }
+       }
+
+       public synchronized static Subject loginUserWithPassword(String user, String password) throws IOException {
+               String tmpPass = password;
+               try {
+                       Subject subject = new Subject();
+                       SecureClientLoginConfiguration loginConf = new SecureClientLoginConfiguration(false, user, password);
+                       LoginContext login = new LoginContext("hadoop-keytab-kerberos", subject, null, loginConf);
+                       subject.getPrincipals().add(new User(user, AuthenticationMethod.KERBEROS, login));
+                       login.login();
+                       return login.getSubject();
+               } catch (LoginException le) {
+                       throw new IOException("Login failure for " + user + " using password " + tmpPass.replaceAll(".","*"), le);
+               }
+       }
+
+       public synchronized static Subject login(String user) throws IOException {
+               Subject subject = new Subject();
+               subject.getPrincipals().add(new User(user));
+               return subject;
+       }
+
+       public static Set<Principal> getUserPrincipals(Subject aSubject) {
+               if (aSubject != null) {
+                       Set<User> list = aSubject.getPrincipals(User.class);
+                       if (list != null) {
+                               Set<Principal> ret = new HashSet<Principal>();
+                               for (User a : list) {
+                                       ret.add(a);
+                               }
+                               return ret;
+                       } else {
+                               return null;
+                       }
+               } else {
+                       return null;
+               }
+       }
+       
+       public static Principal createUserPrincipal(String aLoginName) {
+               return new User(aLoginName) ;
+       }
+
+}
+
+class SecureClientLoginConfiguration extends javax.security.auth.login.Configuration {
+
+       private Map<String, String> kerberosOptions = new HashMap<String, String>();
+       private boolean usePassword = false ;
+
+       public SecureClientLoginConfiguration(boolean useKeyTab, String principal, String credential) {
+               kerberosOptions.put("principal", principal);
+               kerberosOptions.put("debug", "false");
+               if (useKeyTab) {
+                       kerberosOptions.put("useKeyTab", "true");
+                       kerberosOptions.put("keyTab", credential);
+                       kerberosOptions.put("doNotPrompt", "true");
+               } else {
+                       usePassword = true ;
+                       kerberosOptions.put("useKeyTab", "false");
+                       kerberosOptions.put(KrbPasswordSaverLoginModule.USERNAME_PARAM, principal);
+                       kerberosOptions.put(KrbPasswordSaverLoginModule.PASSWORD_PARAM, credential);
+                       kerberosOptions.put("doNotPrompt", "false");
+                       kerberosOptions.put("useFirstPass", "true");
+                       kerberosOptions.put("tryFirstPass","false") ;
+               }
+               kerberosOptions.put("storeKey", "true");
+               kerberosOptions.put("refreshKrb5Config", "true");
+       }
+
+       @Override
+       public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
+               AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
+               if (usePassword) {
+                       AppConfigurationEntry KERBEROS_PWD_SAVER = new AppConfigurationEntry(KrbPasswordSaverLoginModule.class.getName(), LoginModuleControlFlag.REQUIRED, kerberosOptions);
+                       return new AppConfigurationEntry[] { KERBEROS_PWD_SAVER, KEYTAB_KERBEROS_LOGIN };
+               }
+               else {
+                       return new AppConfigurationEntry[] { KEYTAB_KERBEROS_LOGIN };
+               }
+       }
+       
+
+}
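
For reviewers, a minimal usage sketch of the new SecureClientLogin API (not part of the diff; the principal, keytab path, and action body are placeholders):

    import java.security.PrivilegedAction;
    import javax.security.auth.Subject;
    import org.apache.hadoop.security.SecureClientLogin;

    public class SecureClientLoginExample {
        public static void main(String[] args) throws Exception {
            // placeholders: principal and keytab path
            final Subject subject = SecureClientLogin.loginUserFromKeytab(
                    "rangerlookup@EXAMPLE.COM", "/etc/security/keytabs/rangerlookup.keytab");
            Subject.doAs(subject, new PrivilegedAction<Void>() {
                public Void run() {
                    // Hadoop client calls issued here run with the Kerberos credentials
                    System.out.println("Logged in as: " + subject.getPrincipals());
                    return null;
                }
            });
        }
    }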

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
new file mode 100644
index 0000000..53f5859
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/BaseClient.java
@@ -0,0 +1,162 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.plugin.client;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.SecureClientLogin;
+
+public abstract class BaseClient {
+       private static final Log LOG = LogFactory.getLog(BaseClient.class) ;
+       
+       private String serviceName ;
+       private Subject loginSubject ;
+       private HadoopConfigHolder configHolder;
+       
+       protected Map<String,String> connectionProperties ;
+       
+       public BaseClient(String serviceName) {
+               this.serviceName = serviceName ;
+               init() ;
+               login() ;
+       }
+       
+       public BaseClient(String serviceName, Map<String,String> connectionProperties) {
+               this.serviceName = serviceName ;
+               this.connectionProperties = connectionProperties ;
+               init() ;
+               login() ;
+       }
+       
+       
+       private void init() {
+               if (connectionProperties == null) {
+                       configHolder = HadoopConfigHolder.getInstance(serviceName) ;
+               }
+               else {
+                       configHolder = HadoopConfigHolder.getInstance(serviceName,connectionProperties) ;
+               }
+       }
+       
+       
+       protected void login() {
+               ClassLoader prevCl = Thread.currentThread().getContextClassLoader() ;
+               String errMsg = " You can still save the repository and start creating "
+                               + "policies, but you would not be able to use autocomplete for "
+                               + "resource names. Check xa_portal.log for more info.";
+               try {
+                       Thread.currentThread().setContextClassLoader(configHolder.getClassLoader());
+                       String userName = configHolder.getUserName() ;
+                       if (userName == null) {
+                               String msgDesc = "Unable to find login username for hadoop environment, ["
+                                               + serviceName + "]";
+                               HadoopException hdpException = new HadoopException(msgDesc);
+                               hdpException.generateResponseDataMap(false, msgDesc, msgDesc + errMsg,
+                                               null, null);
+
+                               throw hdpException;
+                       }
+                       String keyTabFile = configHolder.getKeyTabFile() ;
+                       if (keyTabFile != null) {
+                               if ( configHolder.isKerberosAuthentication() ) {
+                                       LOG.info("Init Login: security enabled, using username/keytab");
+                                       loginSubject = SecureClientLogin.loginUserFromKeytab(userName, keyTabFile) ;
+                               }
+                               else {
+                                       LOG.info("Init Login: using username");
+                                       loginSubject = SecureClientLogin.login(userName) ;
+                               }
+                       }
+                       else {
+                               String password = configHolder.getPassword() ;
+                               if ( configHolder.isKerberosAuthentication() ) {
+                                       LOG.info("Init Login: using username/password");
+                                       loginSubject = SecureClientLogin.loginUserWithPassword(userName, password) ;
+                               }
+                               else {
+                                       LOG.info("Init Login: security not enabled, using username");
+                                       loginSubject = SecureClientLogin.login(userName) ;
+                               }
+                               }
+                       }
+               } catch (IOException ioe) {
+                       String msgDesc = "Unable to login to Hadoop environment ["
+                                       + serviceName + "]";
+
+                       HadoopException hdpException = new HadoopException(msgDesc, ioe);
+                       hdpException.generateResponseDataMap(false, getMessage(ioe),
+                                       msgDesc + errMsg, null, null);
+                       throw hdpException;
+               } catch (SecurityException se) {
+                       String msgDesc = "Unable to login to Hadoop environment ["
+                                       + serviceName + "]";
+                       HadoopException hdpException = new HadoopException(msgDesc, se);
+                       hdpException.generateResponseDataMap(false, getMessage(se),
+                                       msgDesc + errMsg, null, null);
+                       throw hdpException;
+               } finally {
+                       Thread.currentThread().setContextClassLoader(prevCl);
+               }
+       }
+       
+       public String getSerivceName() {
+               return serviceName ;
+       }
+
+       protected Subject getLoginSubject() {
+               return loginSubject;
+       }
+
+       protected HadoopConfigHolder getConfigHolder() {
+               return configHolder;
+       }
+       
+       public static void generateResponseDataMap(boolean connectivityStatus,
+                       String message, String description, Long objectId,
+                       String fieldName, HashMap<String, Object> responseData) {
+               responseData.put("connectivityStatus", connectivityStatus);
+               responseData.put("message", message);
+               responseData.put("description", description);
+               responseData.put("objectId", objectId);
+               responseData.put("fieldName", fieldName);
+       }
+
+       public static String getMessage(Throwable excp) {
+               List<String> errList = new ArrayList<String>();
+               while (excp != null) {
+                       if (!errList.contains(excp.getMessage() + ". \n")) {
+                               if (excp.getMessage() != null && !(excp.getMessage().equalsIgnoreCase(""))) {
+                                       errList.add(excp.getMessage() + ". \n");
+                               }
+                       }
+                       excp = excp.getCause();
+               }
+               return StringUtils.join(errList, "");
+       }
+       
+}
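
A minimal sketch of how the BaseClient helpers are meant to be used (illustrative only; the failure and description strings are placeholders):

    import java.util.HashMap;
    import org.apache.ranger.plugin.client.BaseClient;

    public class BaseClientHelpersExample {
        public static void main(String[] args) {
            HashMap<String, Object> responseData = new HashMap<String, Object>();
            try {
                throw new RuntimeException("connection refused");   // stand-in failure
            } catch (Exception e) {
                // getMessage() flattens the cause chain into one readable string
                BaseClient.generateResponseDataMap(false, BaseClient.getMessage(e),
                        "Unable to connect to the repository", null, null, responseData);
            }
            System.out.println(responseData);
        }
    }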

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
new file mode 100644
index 0000000..b90f4f6
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopClassLoader.java
@@ -0,0 +1,104 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.plugin.client;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HadoopClassLoader extends ClassLoader {
+       
+       private static final Log LOG = LogFactory.getLog(HadoopClassLoader.class) ;
+       
+       private HadoopConfigHolder confHolder ;
+       
+       public HadoopClassLoader(HadoopConfigHolder confHolder) {
+               super(Thread.currentThread().getContextClassLoader()) ;
+               this.confHolder = confHolder;
+       }
+       
+       
+       @Override
+       protected URL findResource(String resourceName) {
+               LOG.debug("findResource(" + resourceName + ") is called.") ;
+               URL ret = null;
+       
+               if (confHolder.hasResourceExists(resourceName)) {
+                       ret = buildResourceFile(resourceName) ;
+               }
+               else {
+                       ret = super.findResource(resourceName);
+               }
+               LOG.debug("findResource(" + resourceName + ") is returning [" + 
ret + "]") ;
+               return ret ;
+       }
+       
+       
+       @SuppressWarnings("deprecation")
+       private URL buildResourceFile(String aResourceName) {
+               URL ret = null ;
+               String prefix = aResourceName ;
+               String suffix = ".txt" ;
+
+               Properties prop = confHolder.getProperties(aResourceName) ;
+               LOG.debug("Building XML for: " + prop.toString());
+               if (prop != null && prop.size() > 0) {
+                       if (aResourceName.contains(".")) {
+                               int lastDotFound = aResourceName.indexOf(".") ;
+                               prefix = aResourceName.substring(0,lastDotFound) + "-" ;
+                               suffix = aResourceName.substring(lastDotFound) ;
+                       }
+                       
+                       try {
+                               File tempFile = File.createTempFile(prefix, suffix) ;
+                               tempFile.deleteOnExit();
+                               PrintWriter out = new PrintWriter(new FileWriter(tempFile)) ;
+                               out.println("<?xml version=\"1.0\"?>") ;
+                               out.println("<?xml-stylesheet type=\"text/xsl\" href=\"configuration.xsl\"?>") ;
+                               out.println("<configuration xmlns:xi=\"http://www.w3.org/2001/XInclude\">") ;
+                               for(Object keyobj : prop.keySet()) {
+                                       String key = (String)keyobj;
+                                       String val = prop.getProperty(key) ;
+                                       if (HadoopConfigHolder.HADOOP_RPC_PROTECTION.equals(key) && (val == null || val.trim().isEmpty()))  {
+                                               continue;
+                                       }
+                                       out.println("<property><name>" + key.trim() + "</name><value>" + val + "</value></property>") ;
+                               }
+                               out.println("</configuration>") ;
+                               out.close() ;
+                               ret = tempFile.toURL() ;
+                       } catch (IOException e) {
+                               throw new HadoopException("Unable to create hadoop configuration file [" + aResourceName + "]", e) ;
+                       }
+                       
+               }
+               
+               return ret ;
+
+       }
+       
+
+}
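
For reference, the temp file synthesized by buildResourceFile() follows the standard Hadoop configuration layout; for a datasource with a single core-site property it would look roughly like this (property name and value are placeholders):

    <?xml version="1.0"?>
    <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
    <configuration xmlns:xi="http://www.w3.org/2001/XInclude">
    <property><name>fs.default.name</name><value>hdfs://nn.example.com:8020</value></property>
    </configuration>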

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
new file mode 100644
index 0000000..b80780b
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopConfigHolder.java
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.plugin.client;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class HadoopConfigHolder  {
+       private static final Log LOG = LogFactory.getLog(HadoopConfigHolder.class) ;
+       public static final String GLOBAL_LOGIN_PARAM_PROP_FILE = "hadoop-login.properties" ;
+       public static final String DEFAULT_DATASOURCE_PARAM_PROP_FILE = "datasource.properties" ;
+       public static final String RESOURCEMAP_PROP_FILE = "resourcenamemap.properties" ;
+       public static final String DEFAULT_RESOURCE_NAME = "core-site.xml" ;
+       public static final String RANGER_SECTION_NAME = "xalogin.xml" ;
+       public static final String RANGER_LOGIN_USER_NAME_PROP = "username" ;
+       public static final String RANGER_LOGIN_KEYTAB_FILE_PROP = "keytabfile" ;
+       public static final String RANGER_LOGIN_PASSWORD = "password" ;
+       public static final String HADOOP_SECURITY_AUTHENTICATION = "hadoop.security.authentication";
+       public static final String HADOOP_SECURITY_AUTHENTICATION_METHOD = "kerberos";
+       public static final String HADOOP_RPC_PROTECTION = "hadoop.rpc.protection";
+       
+
+       private static boolean initialized = false ;
+       private static Map<String,HashMap<String,Properties>> dataSource2ResourceListMap = new HashMap<String,HashMap<String,Properties>>() ;
+       private static Properties globalLoginProp = new Properties() ;
+       private static Map<String,HadoopConfigHolder> dataSource2HadoopConfigHolder = new HashMap<String,HadoopConfigHolder>() ;
+       private static Properties resourcemapProperties = null ;
+       
+       
+       private String datasourceName ;
+       private String userName ;
+       private String keyTabFile ;
+       private String password ;
+       private boolean isKerberosAuth ;
+       
+       private HadoopClassLoader classLoader ;
+       private Map<String,String>  connectionProperties; 
+       
+       public static HadoopConfigHolder getInstance(String aDatasourceName) {
+               HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+               if (ret == null) {
+                       synchronized(HadoopConfigHolder.class) {
+                               ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+                               if (ret == null) {
+                                       ret = new HadoopConfigHolder(aDatasourceName) ;
+                                       dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+                               }
+                       }
+               }
+               return ret ;
+       }
+       
+       public static HadoopConfigHolder getInstance(String aDatasourceName, Map<String,String> connectionProperties) {
+               HadoopConfigHolder ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+               if (ret == null) {
+                       synchronized(HadoopConfigHolder.class) {
+                               ret = dataSource2HadoopConfigHolder.get(aDatasourceName) ;
+                               if (ret == null) {
+                                       ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+                                       dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+                               }
+                       }
+               }
+               else {
+                       if (connectionProperties !=null  &&  !connectionProperties.equals(ret.connectionProperties)) {
+                               ret = new HadoopConfigHolder(aDatasourceName,connectionProperties) ;
+                               dataSource2HadoopConfigHolder.remove(aDatasourceName) ;
+                               dataSource2HadoopConfigHolder.put(aDatasourceName, ret) ;
+                       }
+               }
+               return ret ;
+       }
+       
+       
+
+       private HadoopConfigHolder(String aDatasourceName) {
+               datasourceName = aDatasourceName;
+               if ( ! initialized ) {
+                       init() ;
+               }
+               initLoginInfo();
+               initClassLoader() ;
+       }
+       
+       private HadoopConfigHolder(String aDatasourceName, Map<String,String> connectionProperties) {
+               datasourceName = aDatasourceName;
+               this.connectionProperties = connectionProperties ;
+               initConnectionProp() ;
+               initLoginInfo();
+               initClassLoader() ;
+       }
+       
+       private void initConnectionProp() {
+               for(String key : connectionProperties.keySet()) {
+                       
+                       String resourceName = getResourceName(key) ;
+                       
+                       if (resourceName == null) {
+                               resourceName = RANGER_SECTION_NAME ;
+                       }
+                       String val = connectionProperties.get(key) ;
+                       addConfiguration(datasourceName, resourceName, key, val );
+               }
+       }
+       
+       private String getResourceName(String key) {
+               
+               if (resourcemapProperties == null) {
+                       initResourceMap();
+               }
+               
+               if (resourcemapProperties != null) {
+                       return resourcemapProperties.getProperty(key);
+               }
+               else {
+                       return null;
+               }
+       }
+
+       public static void initResourceMap() {
+               if (resourcemapProperties == null) {
+                       resourcemapProperties = new Properties() ;
+                       InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(RESOURCEMAP_PROP_FILE) ;
+                       if (in != null) {
+                               try {
+                                       resourcemapProperties.load(in);
+                               } catch (IOException e) {
+                                       throw new HadoopException("Unable to 
load resource map properties from [" + RESOURCEMAP_PROP_FILE + "]", e);
+                               }
+                       }
+                       else {
+                               throw new HadoopException("Unable to locate 
resource map properties from [" + RESOURCEMAP_PROP_FILE + "] in the class 
path.");
+                       }
+               }
+       }
+
+       
+       
+       private static synchronized void init() {
+
+               if (initialized) {
+                       return ;
+               }
+
+               try {
+                       InputStream in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(DEFAULT_DATASOURCE_PARAM_PROP_FILE) ;
+                       if (in != null) {
+                               Properties prop = new Properties() ;
+                               try {
+                                       prop.load(in) ;
+                               } catch (IOException e) {
+                                       throw new HadoopException("Unable to 
get configuration information for Hadoop environments", e);
+                               }
+                               finally {
+                                       try {
+                                               in.close();
+                                       } catch (IOException e) {
+                                               // Ignored exception when the stream is closed.
+                                       } 
+                               }
+       
+                               if (prop.size() == 0) 
+                                       return ;
+                               
+                               for(Object keyobj : prop.keySet()) {
+                                       String key = (String)keyobj;
+                                       String val = prop.getProperty(key) ;
+                                       
+                                       int dotLocatedAt = key.indexOf(".") ;
+                                       
+                                       if (dotLocatedAt == -1) {
+                                               continue ;
+                                       }
+                                       
+                                       String dataSource = key.substring(0,dotLocatedAt) ;
+                                       
+                                       String propKey = key.substring(dotLocatedAt+1) ;
+                                       int resourceFoundAt =  propKey.indexOf(".") ;
+                                       if (resourceFoundAt > -1) {
+                                               String resourceName = propKey.substring(0, resourceFoundAt) + ".xml" ; 
+                                               propKey = propKey.substring(resourceFoundAt+1) ;
+                                               addConfiguration(dataSource, resourceName, propKey, val) ;
+                                       }
+                                       
+                               }
+                       }
+                       
+                       in = HadoopConfigHolder.class.getClassLoader().getResourceAsStream(GLOBAL_LOGIN_PARAM_PROP_FILE) ;
+                       if (in != null) {
+                               Properties tempLoginProp = new Properties() ;
+                               try {
+                                       tempLoginProp.load(in) ;
+                               } catch (IOException e) {
+                                       throw new HadoopException("Unable to 
get login configuration information for Hadoop environments from file: [" + 
GLOBAL_LOGIN_PARAM_PROP_FILE + "]", e);
+                               }
+                               finally {
+                                       try {
+                                               in.close();
+                                       } catch (IOException e) {
+                                               // Ignored exception when the stream is closed.
+                                       } 
+                               }
+                               globalLoginProp = tempLoginProp ;
+                       }
+               }
+               finally {
+                       initialized = true ;
+               }
+       }
+       
+       
+       private void initLoginInfo() {
+               Properties prop = this.getRangerSection() ;
+               if (prop != null) {
+                       userName = prop.getProperty(RANGER_LOGIN_USER_NAME_PROP) ;
+                       keyTabFile = prop.getProperty(RANGER_LOGIN_KEYTAB_FILE_PROP) ;
+                       password = prop.getProperty(RANGER_LOGIN_PASSWORD) ;
+               
+                       if ( getHadoopSecurityAuthentication() != null) {
+                               isKerberosAuth = ( getHadoopSecurityAuthentication().equalsIgnoreCase(HADOOP_SECURITY_AUTHENTICATION_METHOD));
+                       }
+                       else {
+                               isKerberosAuth = (userName != null) && (userName.indexOf("@") > -1) ;
+                       }
+                                       
+               }
+       }
+       
+       private void initClassLoader() {
+               classLoader = new HadoopClassLoader(this) ;
+       }
+       
+       
+       public Properties getRangerSection() {
+               Properties prop = this.getProperties(RANGER_SECTION_NAME) ;
+               if (prop == null) {
+                       prop = globalLoginProp ;
+               }
+               return prop ;
+       }
+
+
+
+       private static void addConfiguration(String dataSource, String resourceName, String propertyName, String value) {
+
+               if (dataSource == null || dataSource.isEmpty()) {
+                       return ;
+               }
+               
+               if (propertyName == null || propertyName.isEmpty()) {
+                       return ;
+               }
+               
+               if (resourceName == null) {
+                       resourceName = DEFAULT_RESOURCE_NAME ;
+               }
+               
+               
+               HashMap<String,Properties> resourceName2PropertiesMap  = 
dataSource2ResourceListMap.get(dataSource) ;
+               
+               if (resourceName2PropertiesMap == null) {
+                       resourceName2PropertiesMap = new 
HashMap<String,Properties>() ;
+                       dataSource2ResourceListMap.put(dataSource, 
resourceName2PropertiesMap) ;
+               }
+               
+               Properties prop = resourceName2PropertiesMap.get(resourceName) ;
+               if (prop == null) {
+                       prop = new Properties() ;
+                       resourceName2PropertiesMap.put(resourceName, prop) ;
+               }
+               if (value == null) {
+                       prop.remove(propertyName) ;
+               }
+               else {
+                       prop.put(propertyName, value) ;
+               }
+       }
+       
+       
+       public String getDatasourceName() {
+               return datasourceName ;
+       }
+       
+       public boolean hasResourceExists(String aResourceName) {
+               HashMap<String,Properties> resourceName2PropertiesMap  = 
dataSource2ResourceListMap.get(datasourceName) ;
+               return (resourceName2PropertiesMap != null && 
resourceName2PropertiesMap.containsKey(aResourceName)) ;
+       }
+
+       public Properties getProperties(String aResourceName) {
+               Properties ret = null ;
+               HashMap<String,Properties> resourceName2PropertiesMap  = 
dataSource2ResourceListMap.get(datasourceName) ;
+               if (resourceName2PropertiesMap != null) {
+                       ret =  resourceName2PropertiesMap.get(aResourceName) ;
+               }
+               return ret ;
+       }
+       
+       public String getHadoopSecurityAuthentication() {
+               Properties repoParam = null ;
+               String ret = null;
+               
+               HashMap<String,Properties> resourceName2PropertiesMap  = 
dataSource2ResourceListMap.get(this.getDatasourceName()) ;
+               
+               if ( resourceName2PropertiesMap != null) {
+                       
repoParam=resourceName2PropertiesMap.get(DEFAULT_RESOURCE_NAME);
+               }
+               
+               if ( repoParam != null ) {
+                       ret = 
(String)repoParam.get(HADOOP_SECURITY_AUTHENTICATION);
+               }
+               return ret;
+       }
+       
+       public String getUserName() {
+               return userName;
+       }
+
+       public String getKeyTabFile() {
+               return keyTabFile;
+       }
+
+       public String getPassword() {
+               return password;
+       }
+
+       public HadoopClassLoader getClassLoader() {
+               return classLoader;
+       }
+
+       public boolean isKerberosAuthentication() {
+               return isKerberosAuth;
+       }
+
+  
+       
+
+}
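
A short sketch of how a caller would drive HadoopConfigHolder (datasource name and property values are placeholders; the key-to-file routing comes from resourcenamemap.properties below):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.ranger.plugin.client.HadoopConfigHolder;

    public class HadoopConfigHolderExample {
        public static void main(String[] args) {
            Map<String, String> connectionProperties = new HashMap<String, String>();
            connectionProperties.put("username", "rangerlookup");                      // routed to xalogin.xml
            connectionProperties.put("fs.default.name", "hdfs://nn.example.com:8020"); // routed to core-site.xml
            connectionProperties.put("hadoop.security.authentication", "simple");      // routed to core-site.xml

            HadoopConfigHolder holder = HadoopConfigHolder.getInstance("hdfsdev", connectionProperties);
            System.out.println(holder.getUserName() + ", kerberos=" + holder.isKerberosAuthentication());
        }
    }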

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
new file mode 100644
index 0000000..1ab2d4b
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/client/HadoopException.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.plugin.client;
+
+import java.util.HashMap;
+
+public class HadoopException extends RuntimeException {
+
+       private static final long serialVersionUID = 8872734935128535649L;
+       
+       public HashMap<String, Object> responseData;
+
+       public HadoopException() {
+               super();
+               // TODO Auto-generated constructor stub
+       }
+
+       public HadoopException(String message, Throwable cause) {
+               super(message, cause);
+               // TODO Auto-generated constructor stub
+       }
+
+       public HadoopException(String message) {
+               super(message);
+               // TODO Auto-generated constructor stub
+       }
+
+       public HadoopException(Throwable cause) {
+               super(cause);
+               // TODO Auto-generated constructor stub
+       }
+
+       public void generateResponseDataMap(boolean connectivityStatus,
+                       String message, String description, Long objectId, String fieldName) {
+               responseData = new HashMap<String, Object>();
+               responseData.put("connectivityStatus", connectivityStatus);
+               responseData.put("message", message);
+               responseData.put("description", description);
+               responseData.put("objectId", objectId);
+               responseData.put("fieldName", fieldName);
+       }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/service/RangerBaseService.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/service/RangerBaseService.java b/agents-common/src/main/java/org/apache/ranger/plugin/service/RangerBaseService.java
index 8eeb439..e7b61db 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/service/RangerBaseService.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/service/RangerBaseService.java
@@ -19,6 +19,7 @@
 
 package org.apache.ranger.plugin.service;
 
+import java.util.HashMap;
 import java.util.List;
 
 import org.apache.ranger.plugin.model.RangerService;
@@ -49,7 +50,7 @@ public abstract class RangerBaseService {
                return service;
        }
 
-       public abstract void validateConfig() throws Exception;
+       public abstract HashMap<String, Object> validateConfig() throws Exception;
        
        public abstract List<String> lookupResource(ResourceLookupContext 
context) throws Exception;
 }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/service/ResourceLookupContext.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/service/ResourceLookupContext.java b/agents-common/src/main/java/org/apache/ranger/plugin/service/ResourceLookupContext.java
index b5c3dda..913f824 100644
--- a/agents-common/src/main/java/org/apache/ranger/plugin/service/ResourceLookupContext.java
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/service/ResourceLookupContext.java
@@ -22,7 +22,20 @@ package org.apache.ranger.plugin.service;
 import java.util.List;
 import java.util.Map;
 
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
 
+import org.codehaus.jackson.annotate.JsonAutoDetect;
+import org.codehaus.jackson.annotate.JsonIgnoreProperties;
+import org.codehaus.jackson.annotate.JsonAutoDetect.Visibility;
+import org.codehaus.jackson.map.annotate.JsonSerialize;
+
+@JsonAutoDetect(getterVisibility=Visibility.NONE, setterVisibility=Visibility.NONE, fieldVisibility=Visibility.ANY)
+@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL )
+@JsonIgnoreProperties(ignoreUnknown=true)
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
 public class ResourceLookupContext {
        private String                    userInput;
        private String                    resourceName;

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/java/org/apache/ranger/plugin/util/TimedEventUtil.java
----------------------------------------------------------------------
diff --git a/agents-common/src/main/java/org/apache/ranger/plugin/util/TimedEventUtil.java b/agents-common/src/main/java/org/apache/ranger/plugin/util/TimedEventUtil.java
new file mode 100644
index 0000000..c6a7bbe
--- /dev/null
+++ b/agents-common/src/main/java/org/apache/ranger/plugin/util/TimedEventUtil.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.plugin.util;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.log4j.Logger;
+
+public class TimedEventUtil{
+
+       static final Logger logger = Logger.getLogger(TimedEventUtil.class);
+
+       public static void runWithTimeout(final Runnable runnable, long timeout, TimeUnit timeUnit) throws Exception {
+               timedTask(new Callable<Object>() {
+                       @Override
+                       public Object call() throws Exception {
+                               runnable.run();
+                               return null;
+                       }
+               }, timeout, timeUnit);
+       }
+
+       public static <T> T timedTask(Callable<T> callableObj, long timeout, 
+                       TimeUnit timeUnit) throws Exception{
+               
+               return callableObj.call();
+               
+               /*
+               final ExecutorService executor = Executors.newSingleThreadExecutor();
+               final Future<T> future = executor.submit(callableObj);
+               executor.shutdownNow();
+
+               try {
+                       return future.get(timeout, timeUnit);
+               } catch (TimeoutException | InterruptedException | ExecutionException e) {
+                       if(logger.isDebugEnabled()){
+                               logger.debug("Error executing task", e);
+                       }
+                       Throwable t = e.getCause();
+                       if (t instanceof Error) {
+                               throw (Error) t;
+                       } else if (t instanceof Exception) {
+                               throw (Exception) e;
+                       } else {
+                               throw new IllegalStateException(t);
+                       }
+               }
+               */
+               
+       }
+       
+
+}
\ No newline at end of file
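
A minimal usage sketch for timedTask (illustrative only; note that in this version the callable is invoked directly, so the timeout is not yet enforced — the executor-based path is commented out above):

    import java.util.concurrent.Callable;
    import java.util.concurrent.TimeUnit;
    import org.apache.ranger.plugin.util.TimedEventUtil;

    public class TimedEventUtilExample {
        public static void main(String[] args) throws Exception {
            String result = TimedEventUtil.timedTask(new Callable<String>() {
                public String call() throws Exception {
                    return "dir-listing";   // stand-in for a slow HDFS lookup
                }
            }, 5, TimeUnit.SECONDS);
            System.out.println(result);
        }
    }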

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/agents-common/src/main/resources/resourcenamemap.properties
----------------------------------------------------------------------
diff --git a/agents-common/src/main/resources/resourcenamemap.properties b/agents-common/src/main/resources/resourcenamemap.properties
new file mode 100644
index 0000000..d9b4d71
--- /dev/null
+++ b/agents-common/src/main/resources/resourcenamemap.properties
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+fs.default.name=core-site.xml
+hadoop.security.authentication=core-site.xml
+hadoop.security.authorization=core-site.xml
+hadoop.security.auth_to_local=core-site.xml
+hadoop.rpc.protection=core-site.xml
+dfs.datanode.kerberos.principal=hdfs-site.xml
+dfs.namenode.kerberos.principal=hdfs-site.xml
+dfs.secondary.namenode.kerberos.principal=hdfs-site.xml
+username=xalogin.xml
+keytabfile=xalogin.xml
+password=xalogin.xml
+hbase.master.kerberos.principal=hbase-site.xml
+hbase.rpc.engine=hbase-site.xml
+hbase.rpc.protection=hbase-site.xml
+hbase.security.authentication=hbase-site.xml
+hbase.zookeeper.property.clientPort=hbase-site.xml
+hbase.zookeeper.quorum=hbase-site.xml
+zookeeper.znode.parent=hbase-site.xml
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
new file mode 100644
index 0000000..65dcdf3
--- /dev/null
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/RangerServiceHdfs.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.services.hdfs;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ranger.plugin.model.RangerService;
+import org.apache.ranger.plugin.model.RangerServiceDef;
+import org.apache.ranger.plugin.service.RangerBaseService;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.services.hdfs.client.HdfsResourceMgr;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+
+public class RangerServiceHdfs extends RangerBaseService {
+
+       private static final Log LOG = LogFactory.getLog(RangerServiceHdfs.class);
+       
+       RangerService           service;
+       RangerServiceDef        serviceDef;
+       Map<String, String> configs;
+       String                      serviceName;
+       
+       public RangerServiceHdfs() {
+               super();
+       }
+       
+       @Override
+       public void init(RangerServiceDef serviceDef, RangerService service) {
+               super.init(serviceDef, service);
+               init();
+       }
+
+       @Override
+       public HashMap<String,Object> validateConfig() throws Exception {
+               HashMap<String, Object> ret = new HashMap<String, Object>();
+               
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("==> RangerServiceHdfs.validateConfig Service: (" + service + " )");
+               }
+               
+               if ( configs != null) {
+                       try  {
+                               ret = HdfsResourceMgr.testConnection(service.getName(), service.getConfigs());
+                       } catch (Exception e) {
+                               LOG.error("<== RangerServiceHdfs.validateConfig Error:" + e);
+                               throw e;
+                       }
+               }
+               
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== RangerServiceHdfs.validateConfig Response : (" + ret + " )");
+               }
+               
+               return ret;
+       }
+
+       @Override
+       public List<String> lookupResource(ResourceLookupContext context) 
throws Exception {
+               List<String> ret = new ArrayList<String>();
+       
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== RangerServiceHdfs.lookupResource 
Context: (" + context + ")");
+               }
+               
+               if (context != null) {
+                       try {
+                               ret  = 
HdfsResourceMgr.getHdfsResources(service.getName(),service.getConfigs(),context);
+                       } catch (Exception e) {
+                         LOG.error( "<==RangerServiceHdfs.lookupResource Error 
: " + e);
+                         throw e;
+                       }
+               }
+               
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== RangerServiceHdfs.lookupResource 
Response: (" + ret + ")");
+               }
+               
+               return ret;
+       }
+       
+       public void init() {
+               service          = getService();
+               serviceDef       = getServiceDef();
+               serviceName  = service.getName();
+               configs          = service.getConfigs();
+       }
+       
+}
+
+

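The class above is the HDFS binding for the new service framework: validateConfig() funnels into HdfsResourceMgr.testConnection() and lookupResource() into HdfsResourceMgr.getHdfsResources(). A minimal sketch of how a caller drives it, assuming a RangerServiceDef/RangerService pair (sd, svc) has already been built and the surrounding method declares throws Exception, mirroring TestRangerServiceHdfs further below:

    // sketch only: sd and svc are pre-built model objects, as in the unit test
    RangerServiceHdfs svcHdfs = new RangerServiceHdfs();
    svcHdfs.init(sd, svc);                                        // bind service def + config
    HashMap<String, Object> connCheck = svcHdfs.validateConfig(); // connectivity probe

    ResourceLookupContext ctx = new ResourceLookupContext();
    ctx.setUserInput("app");                                      // partial path typed by the user
    List<String> candidates = svcHdfs.lookupResource(ctx);        // autocomplete candidates
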
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
new file mode 100644
index 0000000..ff34f4f
--- /dev/null
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsClient.java
@@ -0,0 +1,219 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.services.hdfs.client;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.UnknownHostException;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.security.auth.Subject;
+
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.ranger.plugin.client.BaseClient;
+import org.apache.ranger.plugin.client.HadoopException;
+
+public class HdfsClient extends BaseClient {
+
+    private static final Log LOG = LogFactory.getLog(HdfsClient.class);
+
+    public HdfsClient(String serviceName) {
+        super(serviceName);
+    }
+
+    public HdfsClient(String serviceName, Map<String, String> connectionProperties) {
+        super(serviceName, connectionProperties);
+    }
+
+    private List<String> listFilesInternal(String baseDir, String fileMatching, final List<String> pathList) {
+        List<String> fileList = new ArrayList<String>();
+        ClassLoader prevCl = Thread.currentThread().getContextClassLoader();
+        String errMsg = " You can still save the repository and start creating "
+                + "policies, but you would not be able to use autocomplete for "
+                + "resource names. Check xa_portal.log for more info.";
+        try {
+            Thread.currentThread().setContextClassLoader(getConfigHolder().getClassLoader());
+            String dirPrefix = (baseDir.endsWith("/") ? baseDir : (baseDir + "/"));
+            String filterRegEx = null;
+            if (fileMatching != null && fileMatching.trim().length() > 0) {
+                filterRegEx = fileMatching.trim();
+            }
+
+            Configuration conf = new Configuration();
+            UserGroupInformation.setConfiguration(conf);
+
+            FileSystem fs = null;
+            try {
+                fs = FileSystem.get(conf);
+
+                FileStatus[] fileStats = fs.listStatus(new Path(baseDir));
+
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HdfsClient fileStatus : " + fileStats + " PathList : " + pathList);
+                }
+
+                if (fileStats != null) {
+                    for (FileStatus stat : fileStats) {
+                        Path path = stat.getPath();
+                        String pathComponent = path.getName();
+                        String prefixedPath = dirPrefix + pathComponent;
+                        // skip paths the caller has already selected
+                        if (pathList != null && pathList.contains(prefixedPath)) {
+                            continue;
+                        }
+                        if (filterRegEx == null) {
+                            fileList.add(prefixedPath);
+                        } else if (FilenameUtils.wildcardMatch(pathComponent, fileMatching)) {
+                            fileList.add(prefixedPath);
+                        }
+                    }
+                }
+            } catch (UnknownHostException uhe) {
+                String msgDesc = "listFilesInternal: Unable to connect using given config parameters"
+                        + " of Hadoop environment [" + getSerivceName() + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, uhe);
+                hdpException.generateResponseDataMap(false, getMessage(uhe),
+                        msgDesc + errMsg, null, null);
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HdfsClient listFilesInternal Error : " + uhe);
+                }
+                throw hdpException;
+            } catch (FileNotFoundException fne) {
+                String msgDesc = "listFilesInternal: Unable to locate files using given config parameters "
+                        + "of Hadoop environment [" + getSerivceName() + "].";
+                HadoopException hdpException = new HadoopException(msgDesc, fne);
+                hdpException.generateResponseDataMap(false, getMessage(fne),
+                        msgDesc + errMsg, null, null);
+
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("<== HdfsClient listFilesInternal Error : " + fne);
+                }
+
+                throw hdpException;
+            }
+        } catch (IOException ioe) {
+            String msgDesc = "listFilesInternal: Unable to get listing of files for directory ["
+                    + baseDir + "] file matching [" + fileMatching
+                    + "] from Hadoop environment ["
+                    + getSerivceName()
+                    + "].";
+            HadoopException hdpException = new HadoopException(msgDesc, ioe);
+            hdpException.generateResponseDataMap(false, getMessage(ioe),
+                    msgDesc + errMsg, null, null);
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("<== HdfsClient listFilesInternal Error : " + ioe);
+            }
+            throw hdpException;
+
+        } catch (IllegalArgumentException iae) {
+            String msgDesc = "Unable to get listing of files for directory ["
+                    + baseDir + "] from Hadoop environment [" + getSerivceName()
+                    + "].";
+            HadoopException hdpException = new HadoopException(msgDesc, iae);
+            hdpException.generateResponseDataMap(false, getMessage(iae),
+                    msgDesc + errMsg, null, null);
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("<== HdfsClient listFilesInternal Error : " + iae);
+            }
+            throw hdpException;
+        } finally {
+            Thread.currentThread().setContextClassLoader(prevCl);
+        }
+        return fileList;
+    }
+
+    public List<String> listFiles(final String baseDir, final String fileMatching, final List<String> pathList) {
+
+        PrivilegedAction<List<String>> action = new PrivilegedAction<List<String>>() {
+            @Override
+            public List<String> run() {
+                return listFilesInternal(baseDir, fileMatching, pathList);
+            }
+        };
+        return Subject.doAs(getLoginSubject(), action);
+    }
+
+    public static void main(String[] args) {
+
+        if (args.length < 2) {
+            System.err.println("USAGE: java " + HdfsClient.class.getName() + " repositoryName basedirectory [filenameToMatch]");
+            System.exit(1);
+        }
+
+        String repositoryName = args[0];
+        String baseDir = args[1];
+        String fileNameToMatch = (args.length == 2 ? null : args[2]);
+
+        HdfsClient fs = new HdfsClient(repositoryName);
+        List<String> fsList = fs.listFiles(baseDir, fileNameToMatch, null);
+        if (fsList != null && fsList.size() > 0) {
+            for (String s : fsList) {
+                System.out.println(s);
+            }
+        } else {
+            System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "] in repository [" + repositoryName + "]");
+        }
+    }
+
+    public static HashMap<String, Object> testConnection(String serviceName,
+            Map<String, String> connectionProperties) {
+
+        HashMap<String, Object> responseData = new HashMap<String, Object>();
+        boolean connectivityStatus = false;
+        HdfsClient connectionObj = new HdfsClient(serviceName, connectionProperties);
+        List<String> testResult = connectionObj.listFiles("/", null, null);
+        if (testResult != null && testResult.size() != 0) {
+            connectivityStatus = true;
+        }
+        if (connectivityStatus) {
+            String successMsg = "TestConnection Successful";
+            generateResponseDataMap(connectivityStatus, successMsg, successMsg,
+                    null, null, responseData);
+        } else {
+            String failureMsg = "Unable to retrieve any files using given parameters, "
+                    + "You can still save the repository and start creating policies, "
+                    + "but you would not be able to use autocomplete for resource names. "
+                    + "Check xa_portal.log for more info.";
+            generateResponseDataMap(connectivityStatus, failureMsg, failureMsg,
+                    null, null, responseData);
+        }
+        return responseData;
+    }
+
+}

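HdfsClient does the actual filesystem work: listFiles() runs listFilesInternal() under the repository's login Subject, and testConnection() reduces a listing of "/" to a pass/fail response map. A hedged usage sketch; the service name, path, and wildcard are illustrative, with config key names taken from the unit test further below:

    Map<String, String> props = new HashMap<String, String>();
    props.put("username", "hdfsuser");                      // illustrative credentials
    props.put("fs.default.name", "hdfs://localhost:8020");  // NameNode endpoint
    HdfsClient client = new HdfsClient("Hdfsdev", props);
    // children of /apps whose names match the wildcard "hive*"
    List<String> matches = client.listFiles("/apps", "hive*", null);

The same entry point is reachable from a shell for ad-hoc checks, per the usage string in main(): java org.apache.ranger.services.hdfs.client.HdfsClient <repositoryName> <basedirectory> [filenameToMatch].
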
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
new file mode 100644
index 0000000..e13ee9e
--- /dev/null
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsConnectionMgr.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.services.hdfs.client;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.store.ServiceStoreFactory;
+import org.apache.ranger.plugin.util.TimedEventUtil;
+
+
+public class HdfsConnectionMgr {
+
+    protected Map<String, HdfsClient> hdfsConnectionCache  = null;
+    protected Map<String, Boolean>    repoConnectStatusMap = null;
+
+    private static Logger logger = Logger.getLogger(HdfsConnectionMgr.class);
+
+    public HdfsConnectionMgr() {
+        hdfsConnectionCache  = new HashMap<String, HdfsClient>();
+        repoConnectStatusMap = new HashMap<String, Boolean>();
+    }
+
+    public HdfsClient getHadoopConnection(final String serviceName, final Map<String, String> configs) {
+        HdfsClient hdfsClient  = null;
+        String     serviceType = null;
+        try {
+            serviceType = ServiceStoreFactory.instance()
+                                             .getServiceStore()
+                                             .getServiceByName(serviceName)
+                                             .getType();
+        } catch (Exception ex) {
+            logger.error("Service could not be found for the Service Name : " + serviceName, ex);
+        }
+        if (serviceType != null) {
+            // get it from the cache
+            synchronized (hdfsConnectionCache) {
+                hdfsClient = hdfsConnectionCache.get(serviceType);
+                if (hdfsClient == null) {
+                    if (configs == null) {
+                        final Callable<HdfsClient> connectHDFS = new Callable<HdfsClient>() {
+                            @Override
+                            public HdfsClient call() throws Exception {
+                                return new HdfsClient(serviceName);
+                            }
+                        };
+
+                        try {
+                            hdfsClient = TimedEventUtil.timedTask(connectHDFS, 10, TimeUnit.SECONDS);
+                        } catch (Exception e) {
+                            logger.error("Error establishing connection for HDFS repository : "
+                                    + serviceName, e);
+                        }
+
+                    } else {
+                        final Callable<HdfsClient> connectHDFS = new Callable<HdfsClient>() {
+                            @Override
+                            public HdfsClient call() throws Exception {
+                                return new HdfsClient(serviceName, configs);
+                            }
+                        };
+
+                        try {
+                            hdfsClient = TimedEventUtil.timedTask(connectHDFS, 5, TimeUnit.SECONDS);
+                        } catch (Exception e) {
+                            logger.error("Error establishing connection for HDFS repository : "
+                                    + serviceName + " using configuration : " + configs, e);
+                        }
+                    }
+                    hdfsConnectionCache.put(serviceType, hdfsClient);
+                    repoConnectStatusMap.put(serviceType, true);
+                } else {
+                    // verify the cached connection still works; if not, drop it and reconnect
+                    List<String> testConnect = hdfsClient.listFiles("/", "*", null);
+                    if (testConnect == null) {
+                        hdfsConnectionCache.remove(serviceType);
+                        hdfsClient = getHadoopConnection(serviceName, configs);
+                    }
+                }
+            }
+        } else {
+            logger.error("Service not found with name " + serviceName, new Throwable());
+        }
+
+        return hdfsClient;
+    }
+}

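The connection manager caches one HdfsClient per service and bounds connection setup with TimedEventUtil.timedTask (10 seconds when the stored repository config is used, 5 when explicit configs are passed), so a hung NameNode cannot stall the admin UI. A usage sketch with an assumed service name:

    HdfsConnectionMgr connMgr = new HdfsConnectionMgr();
    // first call connects (with timeout); later calls return the cached client
    HdfsClient client = connMgr.getHadoopConnection("Hdfsdev", configs);
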
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
new file mode 100644
index 0000000..b44c53a
--- /dev/null
+++ b/hdfs-agent/src/main/java/org/apache/ranger/services/hdfs/client/HdfsResourceMgr.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.services.hdfs.client;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.log4j.Logger;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.plugin.util.TimedEventUtil;
+
+public class HdfsResourceMgr {
+
+    public static final String PATH   = "path";
+    public static final Logger logger = Logger.getLogger(HdfsResourceMgr.class);
+
+    public static HashMap<String, Object> testConnection(String serviceName, Map<String, String> configs) throws Exception {
+        HashMap<String, Object> ret = null;
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("==> HdfsResourceMgr.testConnection ServiceName: " + serviceName + " Configs: " + configs);
+        }
+
+        try {
+            ret = HdfsClient.testConnection(serviceName, configs);
+        } catch (Exception e) {
+            logger.error("<== HdfsResourceMgr.testConnection Error: " + e);
+            throw e;
+        }
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("<== HdfsResourceMgr.testConnection Result : " + ret);
+        }
+        return ret;
+    }
+
+    public static List<String> getHdfsResources(String serviceName, Map<String, String> configs, ResourceLookupContext context) throws Exception {
+
+        List<String> resultList = null;
+        String userInput        = context.getUserInput();
+        String resource         = context.getResourceName();
+        Map<String, List<String>> resourceMap = context.getResources();
+        final List<String> pathList = new ArrayList<String>();
+
+        if (resource != null && resourceMap != null && resourceMap.get(PATH) != null) {
+            for (String path : resourceMap.get(PATH)) {
+                pathList.add(path);
+            }
+        }
+
+        if (serviceName != null && userInput != null) {
+            try {
+                if (logger.isDebugEnabled()) {
+                    logger.debug("==> HdfsResourceMgr.getHdfsResources UserInput: " + userInput + " configs: " + configs + " context: " + context);
+                }
+
+                String wildCardToMatch;
+                final HdfsClient hdfsClient = new HdfsConnectionMgr().getHadoopConnection(serviceName, configs);
+                if (hdfsClient != null) {
+                    int lastIndex = userInput.lastIndexOf("/");
+                    if (lastIndex < 0) {
+                        wildCardToMatch = userInput + "*";
+                        userInput = "/";
+                    } else if (lastIndex == 0 && userInput.length() == 1) {
+                        wildCardToMatch = null;
+                        userInput = "/";
+                    } else if ((lastIndex + 1) == userInput.length()) {
+                        wildCardToMatch = null;
+                        userInput = userInput.substring(0, lastIndex + 1);
+                    } else {
+                        wildCardToMatch = userInput.substring(lastIndex + 1) + "*";
+                        userInput = userInput.substring(0, lastIndex + 1);
+                    }
+
+                    final String finalBaseDir = userInput;
+                    final String finalWildCardToMatch = wildCardToMatch;
+                    final Callable<List<String>> callableObj = new Callable<List<String>>() {
+                        @Override
+                        public List<String> call() throws Exception {
+                            return hdfsClient.listFiles(finalBaseDir, finalWildCardToMatch, pathList);
+                        }
+                    };
+
+                    resultList = TimedEventUtil.timedTask(callableObj, 5, TimeUnit.SECONDS);
+                    if (logger.isDebugEnabled()) {
+                        logger.debug("Resource dir : " + userInput
+                                + " wild card to match : " + wildCardToMatch
+                                + "\n Matching resources : " + resultList);
+                    }
+                }
+            } catch (Exception e) {
+                logger.error("Unable to get hdfs resources.", e);
+                throw e;
+            }
+        }
+        if (logger.isDebugEnabled()) {
+            logger.debug("<== HdfsResourceMgr.getHdfsResources Result : " + resultList);
+        }
+        return resultList;
+    }
+}

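The branch ladder in getHdfsResources() splits the user's partial input at the last "/" into a base directory and a trailing wildcard before handing off to HdfsClient.listFiles(). Worked through the four cases (inputs are illustrative):

    "app"       ->  baseDir "/"       wildcard "app*"   (no slash yet: match at the root)
    "/"         ->  baseDir "/"       wildcard null     (list everything at the root)
    "/apps/"    ->  baseDir "/apps/"  wildcard null     (trailing slash: list the directory)
    "/apps/hi"  ->  baseDir "/apps/"  wildcard "hi*"    (prefix-match the last component)
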
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
new file mode 100644
index 0000000..3947ac2
--- /dev/null
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+ package org.apache.ranger.services.hdfs;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.ranger.services.hdfs.client.HdfsClient;
+
+public class HdfsClientTester {
+
+    public static void main(String[] args) throws Throwable {
+        if (args.length < 3) {
+            System.err.println("USAGE: java " + HdfsClientTester.class.getName() + " repositoryName propertyFile basedirectory [filenameToMatch]");
+            System.exit(1);
+        }
+
+        String repositoryName  = args[0];
+        String propFile        = args[1];
+        String baseDir         = args[2];
+        String fileNameToMatch = (args.length == 3 ? null : args[3]);
+
+        Properties conf = new Properties();
+        conf.load(HdfsClientTester.class.getClassLoader().getResourceAsStream(propFile));
+
+        HashMap<String, String> prop = new HashMap<String, String>();
+        for (Object key : conf.keySet()) {
+            Object val = conf.get(key);
+            prop.put((String) key, (String) val);
+        }
+
+        HdfsClient fs = new HdfsClient(repositoryName, prop);
+        List<String> fsList = fs.listFiles(baseDir, fileNameToMatch, null);
+        if (fsList != null && fsList.size() > 0) {
+            for (String s : fsList) {
+                System.out.println(s);
+            }
+        } else {
+            System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "] in repository [" + repositoryName + "]");
+        }
+    }
+
+}

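HdfsClientTester loads the connection properties from a file on the classpath rather than from the service store. A plausible properties file, reusing the keys the unit test below feeds to buildHdfsConnectionConfig() (values are placeholders):

    username=hdfsuser
    password=*******
    fs.default.name=hdfs://localhost:8020
    hadoop.security.authorization=
    hadoop.security.auth_to_local=

and an assumed invocation: java org.apache.ranger.services.hdfs.HdfsClientTester Hdfsdev hdfs-conn.properties /apps "hive*".
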
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
new file mode 100644
index 0000000..dde8d7e
--- /dev/null
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
@@ -0,0 +1,164 @@
+package org.apache.ranger.services.hdfs;
+
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ranger.plugin.model.RangerService;
+import org.apache.ranger.plugin.model.RangerServiceDef;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.plugin.store.ServiceStore;
+import org.apache.ranger.plugin.store.ServiceStoreFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class TestRangerServiceHdfs {
+    static ServiceStore svcStore    = null;
+    static final String sdName      = "svcDef-Hdfs";
+    static final String serviceName = "Hdfsdev";
+
+    HashMap<String, Object> responseData  = null;
+    Map<String, String>     configs       = null;
+    RangerServiceHdfs       svcHdfs       = null;
+    RangerServiceDef        sd            = null;
+    RangerService           svc           = null;
+    ResourceLookupContext   lookupContext = null;
+
+    @Before
+    public void setup() {
+        configs       = new HashMap<String, String>();
+        lookupContext = new ResourceLookupContext();
+
+        buildHdfsConnectionConfig();
+        buildLookupContext();
+
+        svcStore = ServiceStoreFactory.instance().getServiceStore();
+
+        sd      = new RangerServiceDef(sdName, "org.apache.ranger.service.hdfs.RangerServiceHdfs", "TestService", "test servicedef description", null, null, null, null, null);
+        svc     = new RangerService(sdName, serviceName, "unit test hdfs resource lookup and validateConfig", configs);
+        svcHdfs = new RangerServiceHdfs();
+        svcHdfs.init(sd, svc);
+        svcHdfs.init();
+        cleanupBeforeTest();
+        try {
+            svcStore.createServiceDef(sd);
+            svcStore.createService(svc);
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    @Test
+    public void testValidateConfig() {
+
+        HashMap<String, Object> ret = null;
+        String errorMessage = null;
+
+        try {
+            ret = svcHdfs.validateConfig();
+        } catch (Exception e) {
+            errorMessage = e.getMessage();
+        }
+        System.out.println(errorMessage);
+        if (errorMessage != null) {
+            assertTrue(errorMessage.contains("listFilesInternal"));
+        } else {
+            assertNotNull(ret);
+        }
+    }
+
+    @Test
+    public void testLookUpResource() {
+        List<String> ret    = new ArrayList<String>();
+        String errorMessage = null;
+        try {
+            ret = svcHdfs.lookupResource(lookupContext);
+        } catch (Exception e) {
+            errorMessage = e.getMessage();
+        }
+        System.out.println(errorMessage);
+        if (errorMessage != null) {
+            assertNotNull(errorMessage);
+        } else {
+            assertNotNull(ret);
+        }
+    }
+
+    public void buildHdfsConnectionConfig() {
+        configs.put("username", "hdfsuser");
+        configs.put("password", "*******");
+        configs.put("fs.default.name", "hdfs://localhost:8020");
+        configs.put("hadoop.security.authorization", "");
+        configs.put("hadoop.security.auth_to_local", "");
+        configs.put("dfs.datanode.kerberos.principal", "");
+        configs.put("dfs.namenode.kerberos.principal", "");
+        configs.put("dfs.secondary.namenode.kerberos.principal", "");
+        configs.put("commonNameForCertificate", "");
+        configs.put("isencrypted", "true");
+    }
+
+    public void buildLookupContext() {
+        Map<String, List<String>> resourceMap = new HashMap<String, List<String>>();
+        resourceMap.put(null, null);
+        lookupContext.setUserInput("app");
+        lookupContext.setResourceName(null);
+        lookupContext.setResources(resourceMap);
+    }
+
+    public void cleanupBeforeTest() {
+
+        try {
+            List<RangerService> services = svcStore.getServices(null);
+            for (RangerService service : services) {
+                if (service.getName().startsWith(serviceName)) {
+                    svcStore.deleteService(service.getId());
+                }
+            }
+
+            List<RangerServiceDef> serviceDefs = svcStore.getServiceDefs(null);
+            for (RangerServiceDef serviceDef : serviceDefs) {
+                if (serviceDef.getName().startsWith(sdName)) {
+                    svcStore.deleteServiceDef(serviceDef.getId());
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
+
+    @After
+    public void tearDown() {
+        sd  = null;
+        svc = null;
+    }
+
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java b/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
new file mode 100644
index 0000000..d7bfb1d
--- /dev/null
+++ b/security-admin/src/main/java/org/apache/ranger/biz/ServiceMgr.java
@@ -0,0 +1,152 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ranger.biz;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.ranger.plugin.model.RangerService;
+import org.apache.ranger.plugin.model.RangerServiceDef;
+import org.apache.ranger.plugin.service.RangerBaseService;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
+import org.apache.ranger.plugin.store.ServiceStoreFactory;
+import org.apache.ranger.view.VXMessage;
+import org.apache.ranger.view.VXResponse;
+import org.springframework.stereotype.Component;
+
+@Component
+public class ServiceMgr {
+
+    private static final Log LOG = LogFactory.getLog(ServiceMgr.class);
+
+    public List<String> lookupResource(String serviceName, ResourceLookupContext context) throws Exception {
+        List<String>      ret = null;
+        RangerBaseService svc = getRangerServiceByName(serviceName);
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("==> ServiceMgr.lookupResource for Service: (" + svc + ", Context: " + context + ")");
+        }
+
+        if (svc != null) {
+            try {
+                ret = svc.lookupResource(context);
+            } catch (Exception e) {
+                LOG.error("==> ServiceMgr.lookupResource Error: " + e);
+                throw e;
+            }
+        }
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("<== ServiceMgr.lookupResource Response: (" + ret + ")");
+        }
+
+        return ret;
+    }
+
+    public VXResponse validateConfig(RangerService service) throws Exception {
+
+        VXResponse        ret = new VXResponse();
+        RangerBaseService svc = getRangerServiceByService(service);
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("==> ServiceMgr.validateConfig for Service: (" + svc + ")");
+        }
+
+        if (svc != null) {
+            try {
+                HashMap<String, Object> responseData = svc.validateConfig();
+                ret = generateResponseForTestConn(responseData, "");
+            } catch (Exception e) {
+                LOG.error("==> ServiceMgr.validateConfig Error: " + e);
+                throw e;
+            }
+        }
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("<== ServiceMgr.validateConfig Response: (" + ret + ")");
+        }
+
+        return ret;
+    }
+
+    public RangerBaseService getRangerServiceByName(String serviceName) throws Exception {
+
+        RangerService service = ServiceStoreFactory.instance().getServiceStore().getServiceByName(serviceName);
+
+        RangerBaseService svc = getRangerServiceByService(service);
+
+        return svc;
+    }
+
+    public RangerBaseService getRangerServiceByService(RangerService service) throws Exception {
+
+        String serviceType = service.getType();
+
+        RangerServiceDef serviceDef = ServiceStoreFactory.instance().getServiceStore().getServiceDefByName(serviceType);
+
+        RangerBaseService ret = (RangerBaseService) Class.forName(serviceDef.getImplClass()).newInstance();
+
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("==> ServiceMgr.getRangerServiceByService ServiceType: " + serviceType + ", ServiceDef: " + serviceDef + ", Service Class: " + serviceDef.getImplClass());
+        }
+
+        ret.init(serviceDef, service);
+
+        return ret;
+    }
+
+    private VXResponse generateResponseForTestConn(
+            HashMap<String, Object> responseData, String msg) {
+        VXResponse vXResponse = new VXResponse();
+
+        Long objId = (responseData.get("objectId") != null) ? Long
+                .parseLong(responseData.get("objectId").toString()) : null;
+        boolean connectivityStatus = (responseData.get("connectivityStatus") != null) ? Boolean
+                .parseBoolean(responseData.get("connectivityStatus").toString())
+                : false;
+        int statusCode = (connectivityStatus) ? VXResponse.STATUS_SUCCESS
+                : VXResponse.STATUS_ERROR;
+        String message = (responseData.get("message") != null) ? responseData
+                .get("message").toString() : msg;
+        String description = (responseData.get("description") != null) ? responseData
+                .get("description").toString() : msg;
+        String fieldName = (responseData.get("fieldName") != null) ? responseData
+                .get("fieldName").toString() : null;
+
+        VXMessage vXMsg = new VXMessage();
+        List<VXMessage> vXMsgList = new ArrayList<VXMessage>();
+        vXMsg.setFieldName(fieldName);
+        vXMsg.setMessage(message);
+        vXMsg.setObjectId(objId);
+        vXMsgList.add(vXMsg);
+
+        vXResponse.setMessageList(vXMsgList);
+        vXResponse.setMsgDesc(description);
+        vXResponse.setStatusCode(statusCode);
+        return vXResponse;
+    }
+}
+

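generateResponseForTestConn() translates the plugin's response map into a VXResponse for the REST layer. The keys it reads are the ones the generateResponseDataMap() calls in the plugin code above populate; an illustrative map for a failed connection check (values are examples):

    // keys as consumed by generateResponseForTestConn()
    HashMap<String, Object> responseData = new HashMap<String, Object>();
    responseData.put("connectivityStatus", false);
    responseData.put("message", "Unable to retrieve any files using given parameters");
    responseData.put("description", "Check xa_portal.log for more info.");
    responseData.put("objectId", null);   // null -> no object id on the VXMessage
    responseData.put("fieldName", null);
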
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/44765857/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java
----------------------------------------------------------------------
diff --git a/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java b/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java
index 645f8d1..a98f8e4 100644
--- a/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java
+++ b/security-admin/src/main/java/org/apache/ranger/rest/ServiceREST.java
@@ -19,6 +19,7 @@
 
 package org.apache.ranger.rest;
 
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
@@ -41,6 +42,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.ranger.plugin.model.RangerPolicy;
 import org.apache.ranger.plugin.model.RangerService;
 import org.apache.ranger.plugin.model.RangerServiceDef;
+import org.apache.ranger.plugin.service.ResourceLookupContext;
 import org.apache.ranger.plugin.store.ServiceStore;
 import org.apache.ranger.plugin.store.ServiceStoreFactory;
 import org.apache.ranger.plugin.util.SearchFilter;
@@ -50,6 +52,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Scope;
 import org.springframework.security.access.prepost.PreAuthorize;
 import org.springframework.stereotype.Component;
+import org.apache.ranger.biz.ServiceMgr;
 import org.apache.ranger.common.RESTErrorUtil;
 
 
@@ -61,6 +64,9 @@ public class ServiceREST {
 
        @Autowired
        RESTErrorUtil restErrorUtil;
+       
+       @Autowired
+       ServiceMgr serviceMgr;
 
        private ServiceStore svcStore = null;
 
@@ -400,10 +406,9 @@ public class ServiceREST {
                VXResponse ret = new VXResponse();
 
                try {
-                       // TODO: svcStore.validateConfig(service);
+                       ret = serviceMgr.validateConfig(service);
                } catch(Exception excp) {
-                       ret.setStatusCode(VXResponse.STATUS_ERROR);
-                       // TODO: message
+                       throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, excp.getMessage(), true);
                }
 
                if(LOG.isDebugEnabled()) {
@@ -412,6 +417,31 @@ public class ServiceREST {
 
                return ret;
        }
+       
+       @POST
+       @Path("/services/lookupResource/{serviceName}")
+       @Produces({ "application/json", "application/xml" })
+       public List<String> lookupResource(@PathParam("serviceName") String serviceName, ResourceLookupContext context) {
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("==> ServiceREST.lookupResource(" + serviceName + ")");
+               }
+
+               List<String> ret = new ArrayList<String>();
+
+               try {
+                       ret = serviceMgr.lookupResource(serviceName, context);
+               } catch(Exception excp) {
+                       LOG.error("lookupResource() failed", excp);
+
+                       throw restErrorUtil.createRESTException(HttpServletResponse.SC_BAD_REQUEST, excp.getMessage(), true);
+               }
+
+               if(LOG.isDebugEnabled()) {
+                       LOG.debug("<== ServiceREST.lookupResource(" + serviceName + "): " + ret);
+               }
+
+               return ret;
+       }
 
 
        @POST

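The new endpoint accepts a ResourceLookupContext in the request body and returns the matching resource names. An illustrative payload, with field names following the ResourceLookupContext setters used in the test code above (the full URL prefix depends on the web-app context and is not shown in this diff):

    POST .../services/lookupResource/Hdfsdev
    { "userInput": "/app", "resourceName": "path", "resources": { "path": [] } }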