Repository: knox
Updated Branches:
  refs/heads/master 2c9adff8a -> 5e9e3cb34


KNOX-560: Test LDAP Authentication+Authorization from KnoxCLI


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/5e9e3cb3
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/5e9e3cb3
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/5e9e3cb3

Branch: refs/heads/master
Commit: 5e9e3cb34659a6c677166842bc161ba2857d1915
Parents: 2c9adff
Author: Kevin Minder <[email protected]>
Authored: Fri Jun 12 13:21:42 2015 -0400
Committer: Kevin Minder <[email protected]>
Committed: Fri Jun 12 13:21:42 2015 -0400

----------------------------------------------------------------------
 .../org/apache/hadoop/gateway/util/KnoxCLI.java | 280 +++++++++++++++-
 .../hadoop/gateway/GatewayTestConfig.java       |   3 +-
 .../gateway/KnoxCliLdapFuncTestNegative.java    | 319 +++++++++++++++++++
 .../gateway/KnoxCliLdapFuncTestPositive.java    | 317 ++++++++++++++++++
 .../KnoxCliLdapFuncTestNegative/users.ldif      |  93 ++++++
 .../KnoxCliLdapFuncTestPositive/users.ldif      |  93 ++++++
 6 files changed, 1088 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
----------------------------------------------------------------------
diff --git 
a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java 
b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
index c21d6b0..7942d68 100644
--- a/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
+++ b/gateway-server/src/main/java/org/apache/hadoop/gateway/util/KnoxCLI.java
@@ -17,11 +17,13 @@
  */
 package org.apache.hadoop.gateway.util;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.gateway.GatewayCommandLine;
 import org.apache.hadoop.gateway.config.GatewayConfig;
 import org.apache.hadoop.gateway.config.impl.GatewayConfigImpl;
+import org.apache.hadoop.gateway.deploy.DeploymentFactory;
 import org.apache.hadoop.gateway.services.CLIGatewayServices;
 import org.apache.hadoop.gateway.services.GatewayServices;
 import org.apache.hadoop.gateway.services.Service;
@@ -31,32 +33,35 @@ import 
org.apache.hadoop.gateway.services.security.AliasService;
 import org.apache.hadoop.gateway.services.security.KeystoreService;
 import org.apache.hadoop.gateway.services.security.KeystoreServiceException;
 import org.apache.hadoop.gateway.services.security.MasterService;
+import org.apache.hadoop.gateway.topology.Provider;
 import org.apache.hadoop.gateway.topology.Topology;
 import org.apache.hadoop.gateway.topology.validation.TopologyValidator;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.PropertyConfigurator;
-import org.xml.sax.ErrorHandler;
-import org.xml.sax.SAXParseException;
-import org.xml.sax.SAXException;
-
-import javax.xml.XMLConstants;
-import javax.xml.transform.stream.StreamSource;
-import javax.xml.validation.Schema;
-import javax.xml.validation.SchemaFactory;
-import javax.xml.validation.Validator;
+import org.apache.shiro.SecurityUtils;
+import org.apache.shiro.authc.AuthenticationException;
+import org.apache.shiro.authc.UsernamePasswordToken;
+import org.apache.shiro.config.IniSecurityManagerFactory;
+import org.apache.shiro.subject.Subject;
+import org.apache.shiro.util.Factory;
+import org.jboss.shrinkwrap.api.exporter.ExplodedExporter;
+import org.jboss.shrinkwrap.api.spec.WebArchive;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import java.util.UUID;
-import java.util.LinkedList;
-
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.io.Console;
 /**
  *
  */
@@ -74,7 +79,8 @@ public class KnoxCLI extends Configured implements Tool {
       "   [" + RedeployCommand.USAGE + "]\n" +
       "   [" + RedeployCommand.USAGE + "]\n" +
       "   [" + ListTopologiesCommand.USAGE + "]\n" +
-      "   [" + ValidateTopologyCommand.USAGE + "]\n";
+      "   [" + ValidateTopologyCommand.USAGE + "]\n" +
+      "   [" + LDAPAuthCommand.USAGE + "]\n";
 
   /** allows stdout to be captured if necessary */
   public PrintStream out = System.out;
@@ -89,6 +95,10 @@ public class KnoxCLI extends Configured implements Tool {
   private String generate = "false";
   private String hostname = null;
   private boolean force = false;
+  private boolean debug = false;
+  private String user = null;
+  private String pass = null;
+  private boolean groups = false;
 
   // For testing only
   private String master = null;
@@ -129,7 +139,7 @@ public class KnoxCLI extends Configured implements Tool {
   }
 
   private void initializeServices(boolean persisting) throws 
ServiceLifecycleException {
-    GatewayConfig config = new GatewayConfigImpl();
+    GatewayConfig config = getGatewayConfig();
     Map<String,String> options = new HashMap<String,String>();
     options.put(GatewayCommandLine.PERSIST_LONG, Boolean.toString(persisting));
     if (master != null) {
@@ -150,6 +160,7 @@ public class KnoxCLI extends Configured implements Tool {
    * % knoxcli create-cert alias [--hostname h]
    * % knoxcli redeploy [--cluster clustername]
    * % knoxcli validate-topology [--cluster clustername] | [--path 
<path/to/file>]
+   * % knoxcli auth-test [--cluster clustername] [--u username] [--p password]
    * </pre>
    * @param args
    * @return
@@ -193,6 +204,13 @@ public class KnoxCLI extends Configured implements Tool {
           printKnoxShellUsage();
           return -1;
         }
+      }else if(args[i].equals("auth-test")) {
+        if(i + 1 >= args.length) {
+          printKnoxShellUsage();
+          return -1;
+        } else {
+          command = new LDAPAuthCommand();
+        }
       } else if (args[i].equals("list-alias")) {
         command = new AliasListCommand();
       } else if (args[i].equals("--value")) {
@@ -253,6 +271,24 @@ public class KnoxCLI extends Configured implements Tool {
       } else if (args[i].equals("--help")) {
         printKnoxShellUsage();
         return -1;
+      } else if(args[i].equals("--d")) {
+        this.debug = true;
+      } else if(args[i].equals("--u")) {
+        if(i + 1 < args.length) {
+          this.user = args[++i];
+        } else{
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if(args[i].equals("--p")) {
+        if(i + 1 < args.length) {
+          this.pass = args[++i];
+        } else{
+          printKnoxShellUsage();
+          return -1;
+        }
+      } else if (args[i].equals("--g")) {
+        this.groups = true;
       } else {
         printKnoxShellUsage();
         //ToolRunner.printGenericCommandUsage(System.err);
@@ -292,13 +328,16 @@ public class KnoxCLI extends Configured implements Tool {
       out.println( div );
       out.println( RedeployCommand.USAGE + "\n\n" + RedeployCommand.DESC );
       out.println();
-      out.println(div);
+      out.println( div );
       out.println(ValidateTopologyCommand.USAGE + "\n\n" + 
ValidateTopologyCommand.DESC);
       out.println();
-      out.println(div);
+      out.println( div );
       out.println(ListTopologiesCommand.USAGE + "\n\n" + 
ListTopologiesCommand.DESC);
       out.println();
-      out.println(div);
+      out.println( div );
+      out.println(LDAPAuthCommand.USAGE + "\n\n" + LDAPAuthCommand.DESC);
+      out.println();
+      out.println( div );
     }
   }
 
@@ -798,6 +837,215 @@ public class KnoxCLI extends Configured implements Tool {
 
   }
 
+  private class LDAPCommand extends Command {
+
+    public static final String USAGE = "ldap-command";
+    public static final String DESC = "This is an internal command. It should 
not be used.";
+    protected String username = null;
+    protected char[] password = null;
+
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+
+    @Override
+    public void execute() {
+    }
+
+    protected void promptCredentials() {
+      if(this.username == null){
+        Console c = System.console();
+        if( c != null) {
+          this.username = c.readLine("Username: ");
+        }else{
+          try {
+            BufferedReader reader = new BufferedReader(new 
InputStreamReader(System.in));
+            out.println("Username: ");
+            this.username = reader.readLine();
+            reader.close();
+          } catch (IOException e){
+            out.println(e.getMessage());
+            this.username = "";
+          }
+        }
+      }
+
+      if(this.password == null){
+        Console c = System.console();
+        if( c != null) {
+          this.password = c.readPassword("Password: ");
+        }else{
+          try {
+            BufferedReader reader = new BufferedReader(new 
InputStreamReader(System.in));
+            out.println("Password: ");
+            this.password = reader.readLine().toCharArray();
+            reader.close();
+          } catch (IOException e){
+            out.println(e.getMessage());
+            this.password = "".toCharArray();
+          }
+        }
+      }
+    }
+
+    protected Topology getTopology(String topologyName) {
+      TopologyService ts = getTopologyService();
+      ts.reloadTopologies();
+      for (Topology t : ts.getTopologies()) {
+        if(t.getName().equals(topologyName)) {
+          return t;
+        }
+      }
+      return null;
+    }
+
+  }
+
+  private class LDAPAuthCommand extends LDAPCommand {
+
+    public static final String USAGE = "auth-test [--cluster clustername] [--u 
username] [--p password] [--g]";
+    public static final String DESC = "This command tests a cluster's 
configuration ability to\n " +
+        "authenticate a user with a cluster's ShiroProvider settings.\n Use 
\"--g\" if you want to list the groups a" +
+        " user is a member of. \nOptional: [--u username]: Provide a username 
argument to the command\n" +
+        "Optional: [--p password]: Provide a password argument to the 
command.\n" +
+        "If a username and password argument are not supplied, the terminal 
will prompt you for one.";
+
+    private static final String  SUBJECT_USER_GROUPS = "subject.userGroups";
+    private HashSet<String> groupSet = new HashSet<String>();
+
+    @Override
+    public String getUsage() {
+      return USAGE + ":\n\n" + DESC;
+    }
+
+    @Override
+    public void execute() {
+      Topology t = getTopology(cluster);
+      if(user != null){
+        this.username = user;
+      }
+      if(pass != null){
+        this.password = pass.toCharArray();
+      }
+      if(t == null) {
+        out.println("ERR: Topology: " + cluster + " does not exist");
+        return;
+      }
+      if(t.getProvider("authentication", "ShiroProvider") == null) {
+        out.println("ERR: This tool currently only works with shiro as the 
authentication provider.");
+        out.println("ERR: Please update the topology to use \"ShiroProvider\" 
as the authentication provider.");
+        return;
+      }
+
+      promptCredentials();
+      if(username == null || password == null){
+        return;
+      }
+
+      File tmpDir = new File(System.getProperty("java.io.tmpdir"));
+      DeploymentFactory.setGatewayServices(services);
+      WebArchive archive = 
DeploymentFactory.createDeployment(getGatewayConfig(), t);
+      File war = archive.as(ExplodedExporter.class).exportExploded(tmpDir, 
t.getName() + "_deploy.tmp");
+      String config = war.getAbsolutePath() + "/WEB-INF/shiro.ini";
+
+      if(new File(config).exists()) {
+        if(authenticate(config)) {
+          out.println("LDAP authentication successful!");
+          if( groupSet == null || groupSet.isEmpty()){
+            out.println( username + " does not belong to any groups");
+            if(groups){
+              out.println("You were looking for this user's groups but this 
user does not belong to any.");
+              out.println("Your topology file may be incorrectly configured 
for group lookup.");
+              if(!hasGroupLookupErrors(t)){
+                out.println("Some of your topology's param values may be 
incorrect. See the Knox Docs for help");
+              }
+            }
+          } else if (!groupSet.isEmpty()) {
+            for (Object o : groupSet.toArray()) {
+              out.println(username + " is a member of: " + o.toString());
+            }
+          }
+        } else {
+          out.println("ERR: Unable to authenticate user: " + username);
+        }
+      } else {
+        out.println("ERR: No shiro config file found.");
+      }
+
+      //Cleanup temp dir with deployment files
+      try {
+        FileUtils.deleteDirectory(war);
+      } catch (IOException e) {
+        out.println(e.getMessage());
+        out.println("ERR: Error when attempting to delete temp deployment.");
+      }
+    }
+
+//    returns false if any errors are printed
+    private boolean hasGroupLookupErrors(Topology topology){
+      Provider shiro = topology.getProvider("authentication", "ShiroProvider");
+      Map<String, String> params = shiro.getParams();
+      int errs = 0;
+      errs +=  hasParam(params, "main.ldapRealm") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapGroupContextFactory") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.searchBase") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.groupObjectClass") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.memberAttributeValueTemplate") 
? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.memberAttribute") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.authorizationEnabled") ? 0 : 1;
+      errs +=  hasParam(params, 
"main.ldapRealm.contextFactory.systemUsername") ? 0 : 1;
+      errs +=  hasParam(params, 
"main.ldapRealm.contextFactory.systemPassword") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.userDnTemplate") ? 0 : 1;
+      errs +=  hasParam(params, "main.ldapRealm.contextFactory.url") ? 0 : 1;
+      errs +=  hasParam(params, 
"main.ldapRealm.contextFactory.authenticationMechanism")  ? 0 : 1;
+      return errs > 0 ? true : false;
+    }
+
+    // Checks to see if the param name is present. If not, notify the user
+    private boolean hasParam(Map<String, String> params, String key){
+      if(params.get(key) == null){
+        out.println("Error: " + key + " is not present in topology");
+        return false;
+      } else { return true; }
+    }
+
+    protected boolean authenticate(String config) {
+      boolean result = false;
+      try {
+        Factory factory = new IniSecurityManagerFactory(config);
+        org.apache.shiro.mgt.SecurityManager securityManager = 
(org.apache.shiro.mgt.SecurityManager) factory.getInstance();
+        SecurityUtils.setSecurityManager(securityManager);
+        Subject subject = SecurityUtils.getSubject();
+        if (!subject.isAuthenticated()) {
+          UsernamePasswordToken token = new UsernamePasswordToken(username, 
password);
+          try {
+            subject.login(token);
+            result = subject.isAuthenticated();
+            subject.hasRole(""); //Populate subject groups
+            groupSet = (HashSet) 
subject.getSession().getAttribute(SUBJECT_USER_GROUPS);
+          } catch (AuthenticationException e) {
+            out.println(e.getMessage());
+            out.println(e.getCause().getMessage());
+            if (debug) {
+              e.printStackTrace(out);
+            } else {
+              out.println("For more info, use --d for debug output.");
+            }
+          } finally {
+            subject.logout();
+          }
+        }
+      }catch(Exception e){
+        out.println(e.getMessage());
+      } finally {
+        return result;
+
+      }
+    }
+  }
+
   private GatewayConfig getGatewayConfig() {
     GatewayConfig result;
     Configuration conf = getConf();

http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
----------------------------------------------------------------------
diff --git 
a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
index 3f9fde5..3940474 100644
--- 
a/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
+++ 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/GatewayTestConfig.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.gateway;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.gateway.config.GatewayConfig;
 
 import java.net.InetSocketAddress;
@@ -24,7 +25,7 @@ import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.List;
 
-public class GatewayTestConfig implements GatewayConfig {
+public class GatewayTestConfig extends Configuration implements GatewayConfig {
 
   private String gatewayHomeDir = "gateway-home";
   private String hadoopConfDir = "hadoop";

http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
----------------------------------------------------------------------
diff --git 
a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
new file mode 100644
index 0000000..ddd6c2a
--- /dev/null
+++ 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative.java
@@ -0,0 +1,319 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.util.KnoxCLI;
+import org.apache.log4j.Appender;
+import org.hamcrest.Matchers;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.PrintStream;
+import java.io.InputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.net.ServerSocket;
+import java.net.URL;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+
+public class KnoxCliLdapFuncTestNegative {
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
+  private static Logger LOG = LoggerFactory.getLogger( 
KnoxCliLdapFuncTestPositive.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new 
ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new 
ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new 
File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    createTopology(topoDir, "test-cluster.xml", true);
+    createTopology(topoDir, "bad-cluster.xml", false);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void createTopology(File topoDir, String name, boolean 
goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+    if(goodTopology){
+      createTopology().toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+    stream.close();
+
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws 
IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, 
Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag( "provider" )
+        .addTag("role").addText("authentication")
+        .addTag( "name" ).addText( "ShiroProvider" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm.userDnTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + 
ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        .addTag( "name" 
).addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag( "name" ).addText("urls./**")
+        .addTag("value").addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider")
+        .addTag( "role" ).addText("identity-assertion")
+        .addTag("enabled").addText("true")
+        .addTag("name").addText("Default").gotoParent()
+        .addTag("provider")
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology() {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        
.addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        
.addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value" ).addText("guest-password").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:" + 
ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        
.addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.cachingEnabled")
+        .addTag("value").addText("false").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("com.sun.jndi.ldap.connect.pool")
+        .addTag("value").addText("false").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test
+  public void testBadTopology() throws Exception {
+
+    //    Test 4: Authenticate a user with a bad topology configured with 
nothing required for group lookup in the topology
+    outContent.reset();
+    String username = "tom";
+    String password = "tom-password";
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf( config );
+
+    String args1[] = {"auth-test", "--master", "knox", "--cluster", 
"bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args1 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication 
successful"));
+    assertFalse(outContent.toString().contains("analyst"));
+    assertThat(outContent.toString(), containsString("Your topology file may 
be incorrectly configured for group lookup"));
+    assertThat(outContent.toString(), containsString("Error:"));
+
+    outContent.reset();
+    username = "bad-name";
+    password = "bad-password";
+    cli = new KnoxCLI();
+    cli.setConf( config );
+
+    String args2[] = {"auth-test", "--master", "knox", "--cluster", 
"bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args2 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication 
failed"));
+    assertThat(outContent.toString(), containsString("INVALID_CREDENTIALS"));
+
+    outContent.reset();
+    username = "sam";
+    password = "sam-password";
+    cli = new KnoxCLI();
+    cli.setConf( config );
+
+    String args3[] = {"auth-test", "--master", "knox", "--cluster", 
"bad-cluster",
+        "--u", username, "--p", password, "--g" };
+    cli.run( args3 );
+
+    assertThat(outContent.toString(), containsString("LDAP authentication 
successful"));
+    assertThat(outContent.toString(), containsString("Your topology file may 
be incorrectly configured for group lookup"));
+    assertThat(outContent.toString(), containsString("Error:"));
+    assertFalse(outContent.toString().contains("analyst"));
+    assertFalse(outContent.toString().contains("scientist"));
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
----------------------------------------------------------------------
diff --git 
a/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
new file mode 100644
index 0000000..dc91ee0
--- /dev/null
+++ 
b/gateway-test/src/test/java/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive.java
@@ -0,0 +1,317 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway;
+
+import com.mycila.xmltool.XMLDoc;
+import com.mycila.xmltool.XMLTag;
+import org.apache.directory.server.protocol.shared.transport.TcpTransport;
+import org.apache.hadoop.gateway.security.ldap.SimpleLdapDirectoryServer;
+import org.apache.hadoop.gateway.services.DefaultGatewayServices;
+import org.apache.hadoop.gateway.services.ServiceLifecycleException;
+import org.apache.hadoop.gateway.util.KnoxCLI;;
+import org.apache.log4j.Appender;
+import org.hamcrest.Matchers;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.PrintStream;
+import java.io.InputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.net.ServerSocket;
+import java.net.URL;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+import static org.hamcrest.CoreMatchers.containsString;
+import static org.junit.Assert.assertThat;
+
+public class KnoxCliLdapFuncTestPositive {
+
+  private static Class RESOURCE_BASE_CLASS = KnoxCliLdapFuncTestPositive.class;
+  private static Logger LOG = LoggerFactory.getLogger( 
KnoxCliLdapFuncTestPositive.class );
+
+  public static Enumeration<Appender> appenders;
+  public static GatewayTestConfig config;
+  public static GatewayServer gateway;
+  public static String gatewayUrl;
+  public static String clusterUrl;
+  public static SimpleLdapDirectoryServer ldap;
+  public static TcpTransport ldapTransport;
+
+  private static final ByteArrayOutputStream outContent = new 
ByteArrayOutputStream();
+  private static final ByteArrayOutputStream errContent = new 
ByteArrayOutputStream();
+  private static final String uuid = UUID.randomUUID().toString();
+
+  @BeforeClass
+  public static void setupSuite() throws Exception {
+    System.setOut(new PrintStream(outContent));
+    System.setErr(new PrintStream(errContent));
+    setupLdap();
+    setupGateway();
+  }
+
+  @AfterClass
+  public static void cleanupSuite() throws Exception {
+    ldap.stop( true );
+
+    //FileUtils.deleteQuietly( new File( config.getGatewayHomeDir() ) );
+    //NoOpAppender.tearDown( appenders );
+  }
+
+  public static void setupLdap( ) throws Exception {
+    URL usersUrl = getResourceUrl( "users.ldif" );
+    int port = findFreePort();
+    ldapTransport = new TcpTransport( port );
+    ldap = new SimpleLdapDirectoryServer( "dc=hadoop,dc=apache,dc=org", new 
File( usersUrl.toURI() ), ldapTransport );
+    ldap.start();
+    LOG.info( "LDAP port = " + ldapTransport.getPort() );
+  }
+
+  public static void setupGateway() throws Exception {
+
+    File targetDir = new File( System.getProperty( "user.dir" ), "target" );
+    File gatewayDir = new File( targetDir, "gateway-home-" + uuid );
+    gatewayDir.mkdirs();
+
+    GatewayTestConfig testConfig = new GatewayTestConfig();
+    config = testConfig;
+    testConfig.setGatewayHomeDir( gatewayDir.getAbsolutePath() );
+
+    File topoDir = new File( testConfig.getGatewayTopologyDir() );
+    topoDir.mkdirs();
+
+    File deployDir = new File( testConfig.getGatewayDeploymentDir() );
+    deployDir.mkdirs();
+
+    createTopology(topoDir, "test-cluster.xml", true);
+    createTopology(topoDir, "bad-cluster.xml", false);
+
+    DefaultGatewayServices srvcs = new DefaultGatewayServices();
+    Map<String,String> options = new HashMap<String,String>();
+    options.put( "persist-master", "false" );
+    options.put( "master", "password" );
+    try {
+      srvcs.init( testConfig, options );
+    } catch ( ServiceLifecycleException e ) {
+      e.printStackTrace(); // I18N not required.
+    }
+  }
+
+  private static void createTopology(File topoDir, String name, boolean 
goodTopology) throws Exception {
+    File descriptor = new File(topoDir, name);
+
+    if(descriptor.exists()){
+      descriptor.delete();
+      descriptor = new File(topoDir, name);
+    }
+
+    FileOutputStream stream = new FileOutputStream( descriptor, false );
+    if(goodTopology){
+      createTopology().toStream( stream );
+    } else {
+      createBadTopology().toStream( stream );
+    }
+    stream.close();
+
+  }
+
+  private static int findFreePort() throws IOException {
+    ServerSocket socket = new ServerSocket(0);
+    int port = socket.getLocalPort();
+    socket.close();
+    return port;
+  }
+
+  public static InputStream getResourceStream( String resource ) throws 
IOException {
+    return getResourceUrl( resource ).openStream();
+  }
+
+  public static URL getResourceUrl( String resource ) {
+    URL url = ClassLoader.getSystemResource( getResourceName( resource ) );
+    assertThat( "Failed to find test resource " + resource, url, 
Matchers.notNullValue() );
+    return url;
+  }
+
+  public static String getResourceName( String resource ) {
+    return getResourceBaseName() + resource;
+  }
+
+  public static String getResourceBaseName() {
+    return RESOURCE_BASE_CLASS.getName().replaceAll( "\\.", "/" ) + "/";
+  }
+
+  private static XMLTag createBadTopology(){
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag( "gateway" )
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag( "param" )
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:"; + 
ldapTransport.getPort()).gotoParent()
+        .addTag( "param" )
+        
.addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag( "name").addText( "urls./**")
+        .addTag("value").addText( "authcBasic" ).gotoParent().gotoParent()
+        .addTag( "provider" )
+        .addTag( "role" ).addText( "identity-assertion" )
+        .addTag( "enabled" ).addText( "true" )
+        .addTag( "name" ).addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service")
+        .addTag("role").addText( "KNOX" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  private static XMLTag createTopology() {
+
+    XMLTag xml = XMLDoc.newDocument(true)
+        .addRoot("topology")
+        .addTag("gateway")
+        .addTag("provider")
+        .addTag("role").addText("authentication")
+        .addTag("name").addText("ShiroProvider")
+        .addTag("enabled").addText("true")
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapGroupContextFactory")
+        
.addTag("value").addText("org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.searchBase")
+        
.addTag("value").addText("ou=groups,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.groupObjectClass")
+        .addTag("value").addText("groupOfNames").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.memberAttributeValueTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param" )
+        .addTag("name").addText("main.ldapRealm.memberAttribute")
+        .addTag("value").addText("member").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.authorizationEnabled")
+        .addTag("value").addText("true").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemUsername")
+        
.addTag("value").addText("uid=guest,ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.systemPassword")
+        .addTag( "value" ).addText("guest-password").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.userDnTemplate")
+        
.addTag("value").addText("uid={0},ou=people,dc=hadoop,dc=apache,dc=org").gotoParent()
+        .addTag("param")
+        .addTag("name").addText("main.ldapRealm.contextFactory.url")
+        .addTag("value").addText("ldap://localhost:"; + 
ldapTransport.getPort()).gotoParent()
+        .addTag("param")
+        
.addTag("name").addText("main.ldapRealm.contextFactory.authenticationMechanism")
+        .addTag("value").addText("simple").gotoParent()
+        .addTag("param")
+        .addTag("name" ).addText("urls./**")
+        .addTag("value" ).addText("authcBasic").gotoParent().gotoParent()
+        .addTag("provider" )
+        .addTag("role").addText( "identity-assertion" )
+        .addTag( "enabled").addText( "true" )
+        .addTag("name").addText( "Default" ).gotoParent()
+        .gotoRoot()
+        .addTag( "service" )
+        .addTag( "role" ).addText( "test-service-role" )
+        .gotoRoot();
+    // System.out.println( "GATEWAY=" + xml.toString() );
+    return xml;
+  }
+
+  @Test
+  public void testLDAPAuth() throws Exception {
+
+//    Test 1: Make sure authenication is successful and return groups
+    outContent.reset();
+    String username = "sam";
+    String password = "sam-password";
+    String args[] = {"auth-test", "--master", "knox", "--cluster", 
"test-cluster", "--u", username, "--p", password,
+        "--g"};
+    KnoxCLI cli = new KnoxCLI();
+    cli.setConf(config);
+    cli.run(args);
+    assertThat(outContent.toString(), containsString("success"));
+    assertThat(outContent.toString(), containsString("analyst"));
+    assertThat(outContent.toString(), containsString("scientist"));
+
+//    Test 2: Give an invalid name and password combinatinon.
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "bad-name";
+    password = "bad-password";
+    String args2[] = {"auth-test", "--master", "knox", "--cluster", 
"test-cluster", "--u", username, "--p", password};
+    cli.run( args2 );
+    assertThat(outContent.toString(), containsString("LDAP authentication 
failed"));
+
+//    Test 3: Authenticate a user who belongs to no groups, but specify groups 
with --g
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "guest";
+    password = "guest-password";
+    String args3[] = {"auth-test", "--master", "knox", "--cluster", 
"test-cluster",
+        "--u", username, "--p", password };
+    cli.run( args3 );
+    assertThat(outContent.toString(), containsString("LDAP authentication 
success"));
+    assertThat(outContent.toString(), containsString("does not belong to any 
groups"));
+
+    //    Test 4: Pass a non-existent topology
+    outContent.reset();
+    cli = new KnoxCLI();
+    cli.setConf(config);
+    username = "guest";
+    password = "guest-password";
+    String args4[] = {"auth-test", "--master", "knox", "--cluster", 
"cluster-dne",
+        "--u", username, "--p", password };
+    cli.run( args4 );
+    assertThat(outContent.toString(), containsString("ERR: Topology"));
+    assertThat(outContent.toString(), containsString("does not exist"));
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative/users.ldif
----------------------------------------------------------------------
diff --git 
a/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative/users.ldif
 
b/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative/users.ldif
new file mode 100644
index 0000000..213be08
--- /dev/null
+++ 
b/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestNegative/users.ldif
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This LDIF file is provided as a template to illustrate
+# the use of LDAP groups.
+
+version: 1
+
+# Please replace with site specific values
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+# entry for a sample people container
+# please replace with site specific values
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+# entry for a sample end user
+# please replace with site specific values
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: User
+uid: guest
+userPassword:guest-password
+
+# entry for sample user sam
+dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: sam
+sn: sam
+uid: sam
+userPassword:sam-password
+
+# entry for sample user tom
+dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: tom
+sn: tom
+uid: tom
+userPassword:tom-password
+
+# create FIRST Level groups branch
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+description: generic groups branch
+
+# create the analyst group under groups
+dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: analyst
+description:analyst  group
+member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
+member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org
+
+
+# create the scientist group under groups
+dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: scientist
+description: scientist group
+member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/5e9e3cb3/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive/users.ldif
----------------------------------------------------------------------
diff --git 
a/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive/users.ldif
 
b/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive/users.ldif
new file mode 100644
index 0000000..213be08
--- /dev/null
+++ 
b/gateway-test/src/test/resources/org/apache/hadoop/gateway/KnoxCliLdapFuncTestPositive/users.ldif
@@ -0,0 +1,93 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This LDIF file is provided as a template to illustrate
+# the use of LDAP groups.
+
+version: 1
+
+# Please replace with site specific values
+dn: dc=hadoop,dc=apache,dc=org
+objectclass: organization
+objectclass: dcObject
+o: Hadoop
+dc: hadoop
+
+# entry for a sample people container
+# please replace with site specific values
+dn: ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: people
+
+# entry for a sample end user
+# please replace with site specific values
+dn: uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: Guest
+sn: User
+uid: guest
+userPassword:guest-password
+
+# entry for sample user sam
+dn: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: sam
+sn: sam
+uid: sam
+userPassword:sam-password
+
+# entry for sample user tom
+dn: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:person
+objectclass:organizationalPerson
+objectclass:inetOrgPerson
+cn: tom
+sn: tom
+uid: tom
+userPassword:tom-password
+
+# create FIRST Level groups branch
+dn: ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass:organizationalUnit
+ou: groups
+description: generic groups branch
+
+# create the analyst group under groups
+dn: cn=analyst,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: analyst
+description:analyst  group
+member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
+member: uid=tom,ou=people,dc=hadoop,dc=apache,dc=org
+
+
+# create the scientist group under groups
+dn: cn=scientist,ou=groups,dc=hadoop,dc=apache,dc=org
+objectclass:top
+objectclass: groupofnames
+cn: scientist
+description: scientist group
+member: uid=sam,ou=people,dc=hadoop,dc=apache,dc=org
\ No newline at end of file

Reply via email to