Repository: incubator-ranger
Updated Branches:
  refs/heads/master 27a70dd1a -> eb31b7e95


RANGER-1006 - Add tests for the HDFS plugin

Reviewed by Ramesh Mani


Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/eb31b7e9
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/eb31b7e9
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/eb31b7e9

Branch: refs/heads/master
Commit: eb31b7e95783539ccbc67dbc8a70f5544b5c88b0
Parents: 27a70dd
Author: Colm O hEigeartaigh <[email protected]>
Authored: Fri Jun 3 15:10:14 2016 +0100
Committer: Colm O hEigeartaigh <[email protected]>
Committed: Fri Jun 3 15:10:14 2016 +0100

----------------------------------------------------------------------
 hdfs-agent/pom.xml                              |  16 +
 .../ranger/services/hdfs/HDFSRangerTest.java    | 314 ++++++++++++++
 .../ranger/services/hdfs/HdfsClientTester.java  |  79 ----
 .../services/hdfs/RangerAdminClientImpl.java    |  84 ++++
 .../services/hdfs/TestRangerServiceHdfs.java    | 138 -------
 .../src/test/resources/hdfs-policies.json       | 414 +++++++++++++++++++
 .../src/test/resources/ranger-hdfs-security.xml |  45 ++
 7 files changed, 873 insertions(+), 217 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/pom.xml
----------------------------------------------------------------------
diff --git a/hdfs-agent/pom.xml b/hdfs-agent/pom.xml
index f5e42eb..33ddd81 100644
--- a/hdfs-agent/pom.xml
+++ b/hdfs-agent/pom.xml
@@ -123,6 +123,13 @@
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-minicluster</artifactId>
+            <version>${hadoop.version}</version>
+            <scope>test</scope>
+        </dependency>
+
     </dependencies>
     <build>
         <!--
@@ -168,5 +175,14 @@
         <!--
     </pluginManagement>
         -->
+        <testResources>
+            <testResource>
+                <directory>src/test/resources</directory>
+                <includes>
+                    <include>**/*</include>
+                </includes>
+                <filtering>true</filtering>
+            </testResource>
+        </testResources>
     </build>
 </project>
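
Note: the test-resource filtering enabled above is presumably what lets Maven substitute ${project.build.directory} into the new ranger-hdfs-security.xml further down (the policy cache directory), so the plugin's policy cache is written under target/ rather than into the source tree.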

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HDFSRangerTest.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HDFSRangerTest.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HDFSRangerTest.java
new file mode 100644
index 0000000..56aac0b
--- /dev/null
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HDFSRangerTest.java
@@ -0,0 +1,314 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ranger.services.hdfs;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.security.PrivilegedExceptionAction;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.LocatedFileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer;
+import org.apache.ranger.authorization.hadoop.exceptions.RangerAccessControlException;
+import org.junit.Assert;
+
+/**
+ * Here we plug the Ranger AccessControlEnforcer into HDFS.
+ *
+ * A custom RangerAdminClient is plugged into Ranger in turn, which loads security policies from a local file. These policies were
+ * generated in the Ranger Admin UI for a service called "HDFSTest". It contains three policies, each of which grants read, write and
+ * execute permissions in turn to "/tmp/tmpdir", "/tmp/tmpdir2" and "/tmp/tmpdir3" to a user called "bob" and to a group called "IT".
+ */
+public class HDFSRangerTest {
+    
+    private static final File baseDir = new File("./target/hdfs/").getAbsoluteFile();
+    private static MiniDFSCluster hdfsCluster;
+    private static String defaultFs;
+    
+    @org.junit.BeforeClass
+    public static void setup() throws Exception {
+        Configuration conf = new Configuration();
+        conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, baseDir.getAbsolutePath());
+        conf.set("dfs.namenode.inode.attributes.provider.class", RangerHdfsAuthorizer.class.getName());
+        MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(conf);
+        hdfsCluster = builder.build();
+        defaultFs = conf.get("fs.defaultFS");
+    }
+    
+    @org.junit.AfterClass
+    public static void cleanup() throws Exception {
+        FileUtil.fullyDelete(baseDir);
+        hdfsCluster.shutdown();
+    }
+    
+    @org.junit.Test
+    public void readTest() throws Exception {
+        FileSystem fileSystem = hdfsCluster.getFileSystem();
+        
+        // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
+        final Path file = new Path("/tmp/tmpdir/data-file2");
+        FSDataOutputStream out = fileSystem.create(file);
+        for (int i = 0; i < 1024; ++i) {
+            out.write(("data" + i + "\n").getBytes("UTF-8"));
+            out.flush();
+        }
+        out.close();
+        
+        // Change permissions to read-only
+        fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
+        
+        // Now try to read the file as "bob" - this should be allowed (by the policy - user)
+        UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Read the file
+                FSDataInputStream in = fs.open(file);
+                ByteArrayOutputStream output = new ByteArrayOutputStream();
+                IOUtils.copy(in, output);
+                String content = new String(output.toByteArray());
+                Assert.assertTrue(content.startsWith("data0"));
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Now try to read the file as "alice" - this should be allowed (by the policy - group)
+        ugi = UserGroupInformation.createUserForTesting("alice", new String[] {"IT"});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Read the file
+                FSDataInputStream in = fs.open(file);
+                ByteArrayOutputStream output = new ByteArrayOutputStream();
+                IOUtils.copy(in, output);
+                String content = new String(output.toByteArray());
+                Assert.assertTrue(content.startsWith("data0"));
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Now try to read the file as unknown user "eve" - this should not be allowed
+        ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Read the file
+                try {
+                    fs.open(file);
+                    Assert.fail("Failure expected on an incorrect permission");
+                } catch (RemoteException ex) {
+                    // expected
+                    Assert.assertTrue(RangerAccessControlException.class.getName().equals(ex.getClassName()));
+                }
+                
+                fs.close();
+                return null;
+            }
+        });
+    }
+    
+    @org.junit.Test
+    public void writeTest() throws Exception {
+        
+        FileSystem fileSystem = hdfsCluster.getFileSystem();
+        
+        // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
+        final Path file = new Path("/tmp/tmpdir2/data-file3");
+        FSDataOutputStream out = fileSystem.create(file);
+        for (int i = 0; i < 1024; ++i) {
+            out.write(("data" + i + "\n").getBytes("UTF-8"));
+            out.flush();
+        }
+        out.close();
+        
+        // Now try to write to the file as "bob" - this should be allowed (by the policy - user)
+        UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Write to the file
+                fs.append(file);
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Now try to write to the file as "alice" - this should be allowed (by the policy - group)
+        ugi = UserGroupInformation.createUserForTesting("alice", new String[] {"IT"});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Write to the file
+                fs.append(file);
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Now try to write to the file as unknown user "eve" - this should not be allowed
+        ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // Write to the file
+                try {
+                    fs.append(file);
+                    Assert.fail("Failure expected on an incorrect permission");
+                } catch (RemoteException ex) {
+                    // expected
+                    Assert.assertTrue(RangerAccessControlException.class.getName().equals(ex.getClassName()));
+                }
+                
+                fs.close();
+                return null;
+            }
+        });
+    }
+ 
+    @org.junit.Test
+    public void executeTest() throws Exception {
+        FileSystem fileSystem = hdfsCluster.getFileSystem();
+        
+        // Write a file - the AccessControlEnforcer won't be invoked as we are the "superuser"
+        final Path file = new Path("/tmp/tmpdir3/data-file2");
+        FSDataOutputStream out = fileSystem.create(file);
+        for (int i = 0; i < 1024; ++i) {
+            out.write(("data" + i + "\n").getBytes("UTF-8"));
+            out.flush();
+        }
+        out.close();
+        
+        // Change permissions to read-only
+        fileSystem.setPermission(file, new FsPermission(FsAction.READ, FsAction.NONE, FsAction.NONE));
+        
+        // Change the parent directory permissions to be execute only for the owner
+        Path parentDir = new Path("/tmp/tmpdir3");
+        fileSystem.setPermission(parentDir, new FsPermission(FsAction.EXECUTE, FsAction.NONE, FsAction.NONE));
+        
+        // Try to read the directory as "bob" - this should be allowed (by the policy - user)
+        UserGroupInformation ugi = UserGroupInformation.createUserForTesting("bob", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
+                Assert.assertTrue(iter.hasNext());
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Try to read the directory as "alice" - this should be allowed (by the policy - group)
+        ugi = UserGroupInformation.createUserForTesting("alice", new String[] {"IT"});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
+                Assert.assertTrue(iter.hasNext());
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+        // Now try to read the directory as unknown user "eve" - this should not be allowed
+        ugi = UserGroupInformation.createUserForTesting("eve", new String[] {});
+        ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+            public Void run() throws Exception {
+                Configuration conf = new Configuration();
+                conf.set("fs.defaultFS", defaultFs);
+                
+                FileSystem fs = FileSystem.get(conf);
+                
+                // List the files in the directory
+                try {
+                    RemoteIterator<LocatedFileStatus> iter = fs.listFiles(file.getParent(), false);
+                    Assert.assertTrue(iter.hasNext());
+                    Assert.fail("Failure expected on an incorrect permission");
+                } catch (RemoteException ex) {
+                    // expected
+                    Assert.assertTrue(RangerAccessControlException.class.getName().equals(ex.getClassName()));
+                }
+                
+                fs.close();
+                return null;
+            }
+        });
+        
+    }
+    
+}
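
For readers skimming the diff, the essential wiring the test relies on can be distilled as follows. This is a sketch only: imports and JUnit scaffolding are as in HDFSRangerTest above, and the policies themselves come from the hdfs-policies.json file added below.

    // Boot a MiniDFSCluster with the Ranger authorizer plugged in as the NameNode's
    // INode attribute provider, so every HDFS access is checked against the Ranger policies.
    Configuration conf = new Configuration();
    conf.set("dfs.namenode.inode.attributes.provider.class", RangerHdfsAuthorizer.class.getName());
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    final String defaultFs = conf.get("fs.defaultFS");

    // Act as the test user "bob" so the policies (not the HDFS superuser) decide the outcome.
    UserGroupInformation bob = UserGroupInformation.createUserForTesting("bob", new String[] {});
    bob.doAs(new PrivilegedExceptionAction<Void>() {
        public Void run() throws Exception {
            Configuration c = new Configuration();
            c.set("fs.defaultFS", defaultFs);
            FileSystem fs = FileSystem.get(c);            // "bob" is the caller Ranger evaluates
            fs.open(new Path("/tmp/tmpdir/data-file2"));  // allowed or denied per hdfs-policies.json
            fs.close();
            return null;
        }
    });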

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
deleted file mode 100644
index ff2802a..0000000
--- a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/HdfsClientTester.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.services.hdfs;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Properties;
-
-import org.apache.ranger.services.hdfs.client.HdfsClient;
-
-public class HdfsClientTester {
-
-       public static void main(String[] args) throws Throwable {
-               if (args.length < 3) {
-                       System.err.println("USAGE: java " + HdfsClient.class.getName() + " repositoryName propertyFile basedirectory  [filenameToMatch]") ;
-                       System.exit(1) ;
-               }
-               
-               String repositoryName = args[0] ;
-               String propFile = args[1] ;
-               String baseDir = args[2] ;
-               String fileNameToMatch = (args.length == 3 ? null : args[3]) ;
-
-               Properties conf = new Properties() ;
-               
-               InputStream in = HdfsClientTester.class.getClassLoader().getResourceAsStream(propFile) ;
-               try {
-                       conf.load(in);
-               }
-               finally {
-                       if (in != null) {
-                               try {
-                               in.close() ;
-                               }
-                               catch(IOException ioe) {
-                                       // Ignore IOException created during close
-                               }
-                       }
-               }
-               
-               HashMap<String,String> prop = new HashMap<String,String>() ;
-               for(Object key : conf.keySet()) {
-                       Object val = conf.get(key) ;
-                       prop.put((String)key, (String)val) ;
-               }
-               
-               HdfsClient fs = new HdfsClient(repositoryName, prop) ;
-               List<String> fsList = fs.listFiles(baseDir, fileNameToMatch,null) ;
-               if (fsList != null && fsList.size() > 0) {
-                       for(String s : fsList) {
-                               System.out.println(s) ;
-                       }
-               }
-               else {
-                       System.err.println("Unable to get file listing for [" + baseDir + (baseDir.endsWith("/") ? "" : "/") + fileNameToMatch + "]  in repository [" + repositoryName + "]") ;
-               }
-
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/RangerAdminClientImpl.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/RangerAdminClientImpl.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/RangerAdminClientImpl.java
new file mode 100644
index 0000000..06150f9
--- /dev/null
+++ b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/RangerAdminClientImpl.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ranger.services.hdfs;
+
+import java.io.File;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.util.List;
+
+import org.apache.ranger.admin.client.RangerAdminClient;
+import org.apache.ranger.plugin.util.GrantRevokeRequest;
+import org.apache.ranger.plugin.util.ServicePolicies;
+import org.apache.ranger.plugin.util.ServiceTags;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+
+/**
+ * A test implementation of the RangerAdminClient interface that just reads policies in from a file and returns them
+ */
+public class RangerAdminClientImpl implements RangerAdminClient {
+    private static final Logger LOG = LoggerFactory.getLogger(RangerAdminClientImpl.class);
+    private final static String cacheFilename = "hdfs-policies.json";
+    private Gson gson;
+
+    public void init(String serviceName, String appId, String configPropertyPrefix) {
+        Gson gson = null;
+        try {
+            gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create();
+        } catch(Throwable excp) {
+            LOG.error("RangerAdminClientImpl: failed to create GsonBuilder object", excp);
+        }
+        this.gson = gson;
+    }
+
+    public ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion) throws Exception {
+
+        String basedir = System.getProperty("basedir");
+        if (basedir == null) {
+            basedir = new File(".").getCanonicalPath();
+        }
+
+        java.nio.file.Path cachePath = FileSystems.getDefault().getPath(basedir, "/src/test/resources/" + cacheFilename);
+        byte[] cacheBytes = Files.readAllBytes(cachePath);
+
+        return gson.fromJson(new String(cacheBytes), ServicePolicies.class);
+    }
+
+    public void grantAccess(GrantRevokeRequest request) throws Exception {
+        
+    }
+
+    public void revokeAccess(GrantRevokeRequest request) throws Exception {
+        
+    }
+
+    public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion) throws Exception {
+        return null;
+        
+    }
+
+    public List<String> getTagTypes(String tagTypePattern) throws Exception {
+        return null;
+    }
+
+    
+}
\ No newline at end of file
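
Note: the ranger-hdfs-security.xml added at the end of this commit points ranger.plugin.hdfs.policy.source.impl at this class, so the HDFS plugin reads its policies from src/test/resources/hdfs-policies.json rather than contacting a live Ranger Admin instance.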

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java b/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
deleted file mode 100644
index 053fbb8..0000000
--- a/hdfs-agent/src/test/java/org/apache/ranger/services/hdfs/TestRangerServiceHdfs.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.ranger.services.hdfs;
-
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.ranger.plugin.model.RangerService;
-import org.apache.ranger.plugin.model.RangerServiceDef;
-import org.apache.ranger.plugin.service.ResourceLookupContext;
-import org.apache.ranger.services.hdfs.RangerServiceHdfs;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
-
-public class TestRangerServiceHdfs {
-       static final String     sdName            =  "svcDef-Hdfs";
-       static final String     serviceName   =  "Hdfsdev";
-       HashMap<String, Object> responseData  =  null;
-       Map<String, String>     configs           =  null;
-       RangerServiceHdfs               svcHdfs           =  null;
-       RangerServiceDef                sd                        =  null;
-       RangerService                   svc                       =  null;
-       ResourceLookupContext   lookupContext =  null;
-       
-       
-       @Before
-       public void setup() {
-               configs         = new HashMap<String,String>();
-               lookupContext = new ResourceLookupContext();
-               
-               buildHdfsConnectionConfig();
-               buildLookupContext();
-
-               sd               = new RangerServiceDef(sdName, "org.apache.ranger.service.hdfs.RangerServiceHdfs", "TestService", "test servicedef description", null, null, null, null, null, null, null);
-               svc      = new RangerService(sdName, serviceName, "unit test hdfs resource lookup and validateConfig", null, configs);
-               svcHdfs = new RangerServiceHdfs();
-               svcHdfs.init(sd, svc);
-       }
-       
-       @Test
-       public void testValidateConfig() {
-
-               /* TODO: does this test require a live HDFS environment?
-                *
-               HashMap<String,Object> ret = null;
-               String errorMessage = null;
-               
-               try { 
-                       ret = svcHdfs.validateConfig();
-               }catch (Exception e) {
-                       errorMessage = e.getMessage();
-               }
-               System.out.println(errorMessage);
-               if ( errorMessage != null) {
-                       assertTrue(errorMessage.contains("listFilesInternal"));
-               } else {
-                       assertNotNull(ret);
-               }
-               *
-               */
-       }
-       
-       
-       @Test
-       public void     testLookUpResource() {
-               /* TODO: does this test require a live HDFS environment?
-                *
-               List<String> ret        = new ArrayList<String>();
-               String errorMessage = null;
-               try {
-                       ret = svcHdfs.lookupResource(lookupContext);
-               }catch (Exception e) {
-                       errorMessage = e.getMessage();
-               }
-               System.out.println(errorMessage);
-               if ( errorMessage != null) {
-                       assertNotNull(errorMessage);
-               } else {
-                       assertNotNull(ret);
-               }
-               *
-               */
-       }
-       
-       public void buildHdfsConnectionConfig() {
-               configs.put("username", "hdfsuser");
-               configs.put("password", "*******");
-               configs.put("fs.default.name", "hdfs://localhost:8020");
-               configs.put("hadoop.security.authorization","");
-               configs.put("hadoop.security.auth_to_local","");
-               configs.put("dfs.datanode.kerberos.principa","");
-               configs.put("dfs.namenode.kerberos.principal","");
-               configs.put("dfs.secondary.namenode.kerberos.principal","");
-               configs.put("commonNameForCertificate","");
-               configs.put("isencrypted","true");
-       }
-
-       public void buildLookupContext() {
-               Map<String, List<String>> resourceMap = new HashMap<String,List<String>>();
-               resourceMap.put(null, null);
-               lookupContext.setUserInput("app");
-               lookupContext.setResourceName(null);
-               lookupContext.setResources(resourceMap);
-       }
-       
-                       
-       @After
-       public void tearDown() {
-               sd  = null;
-               svc = null;
-       }
-       
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/resources/hdfs-policies.json
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/resources/hdfs-policies.json b/hdfs-agent/src/test/resources/hdfs-policies.json
new file mode 100644
index 0000000..c99c811
--- /dev/null
+++ b/hdfs-agent/src/test/resources/hdfs-policies.json
@@ -0,0 +1,414 @@
+{
+  "serviceName": "HDFSTest",
+  "serviceId": 6,
+  "policyVersion": 24,
+  "policyUpdateTime": "20160530-12:34:17.000-+0100",
+  "policies": [
+    {
+      "service": "HDFSTest",
+      "name": "TmpdirWrite",
+      "description": "",
+      "resourceSignature": "532dfdda3e85610c42ffaeb34f82610f",
+      "isAuditEnabled": true,
+      "resources": {
+        "path": {
+          "values": [
+            "/tmp/tmpdir2"
+          ],
+          "isExcludes": false,
+          "isRecursive": true
+        }
+      },
+      "policyItems": [
+        {
+          "accesses": [
+            {
+              "type": "write",
+              "isAllowed": true
+            }
+          ],
+          "users": [],
+          "groups": [
+            "IT"
+          ],
+          "conditions": [],
+          "delegateAdmin": false
+        },
+        {
+          "accesses": [
+            {
+              "type": "write",
+              "isAllowed": true
+            }
+          ],
+          "users": [
+            "bob"
+          ],
+          "groups": [],
+          "conditions": [],
+          "delegateAdmin": false
+        }
+      ],
+      "denyPolicyItems": [],
+      "allowExceptions": [],
+      "denyExceptions": [],
+      "dataMaskPolicyItems": [],
+      "rowFilterPolicyItems": [],
+      "id": 27,
+      "guid": "1464353414767_67_927",
+      "isEnabled": true,
+      "createdBy": "Admin",
+      "updatedBy": "Admin",
+      "createTime": "20160527-13:50:14.000-+0100",
+      "updateTime": "20160527-13:50:14.000-+0100",
+      "version": 1
+    },
+    {
+      "service": "HDFSTest",
+      "name": "TmpdirExecute",
+      "description": "",
+      "resourceSignature": "1270d836e3dd33e61abda30413111851",
+      "isAuditEnabled": true,
+      "resources": {
+        "path": {
+          "values": [
+            "/tmp/tmpdir3"
+          ],
+          "isExcludes": false,
+          "isRecursive": true
+        }
+      },
+      "policyItems": [
+        {
+          "accesses": [
+            {
+              "type": "execute",
+              "isAllowed": true
+            }
+          ],
+          "users": [],
+          "groups": [
+            "IT"
+          ],
+          "conditions": [],
+          "delegateAdmin": false
+        },
+        {
+          "accesses": [
+            {
+              "type": "execute",
+              "isAllowed": true
+            }
+          ],
+          "users": [
+            "bob"
+          ],
+          "groups": [],
+          "conditions": [],
+          "delegateAdmin": false
+        }
+      ],
+      "denyPolicyItems": [],
+      "allowExceptions": [],
+      "denyExceptions": [],
+      "dataMaskPolicyItems": [],
+      "rowFilterPolicyItems": [],
+      "id": 28,
+      "guid": "1464606028409_457_979",
+      "isEnabled": true,
+      "createdBy": "Admin",
+      "updatedBy": "Admin",
+      "createTime": "20160530-12:00:28.000-+0100",
+      "updateTime": "20160530-12:00:28.000-+0100",
+      "version": 1
+    },
+    {
+      "service": "HDFSTest",
+      "name": "TmpdirRead",
+      "description": "",
+      "resourceSignature": "83af98ea40841d0e7b1b53175f70506c",
+      "isAuditEnabled": true,
+      "resources": {
+        "path": {
+          "values": [
+            "/tmp/tmpdir"
+          ],
+          "isExcludes": false,
+          "isRecursive": true
+        }
+      },
+      "policyItems": [
+        {
+          "accesses": [
+            {
+              "type": "read",
+              "isAllowed": true
+            }
+          ],
+          "users": [],
+          "groups": [
+            "IT"
+          ],
+          "conditions": [],
+          "delegateAdmin": false
+        },
+        {
+          "accesses": [
+            {
+              "type": "read",
+              "isAllowed": true
+            }
+          ],
+          "users": [
+            "bob"
+          ],
+          "groups": [],
+          "conditions": [],
+          "delegateAdmin": false
+        }
+      ],
+      "denyPolicyItems": [],
+      "allowExceptions": [],
+      "denyExceptions": [],
+      "dataMaskPolicyItems": [],
+      "rowFilterPolicyItems": [],
+      "id": 29,
+      "guid": "1464607736570_430_1010",
+      "isEnabled": true,
+      "createdBy": "Admin",
+      "updatedBy": "Admin",
+      "createTime": "20160530-12:28:56.000-+0100",
+      "updateTime": "20160530-12:34:17.000-+0100",
+      "version": 3
+    }
+  ],
+  "serviceDef": {
+    "name": "hdfs",
+    "implClass": "org.apache.ranger.services.hdfs.RangerServiceHdfs",
+    "label": "HDFS Repository",
+    "description": "HDFS Repository",
+    "options": {},
+    "configs": [
+      {
+        "itemId": 1,
+        "name": "username",
+        "type": "string",
+        "subType": "",
+        "mandatory": true,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Username"
+      },
+      {
+        "itemId": 2,
+        "name": "password",
+        "type": "password",
+        "subType": "",
+        "mandatory": true,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Password"
+      },
+      {
+        "itemId": 3,
+        "name": "fs.default.name",
+        "type": "string",
+        "subType": "",
+        "mandatory": true,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Namenode URL"
+      },
+      {
+        "itemId": 4,
+        "name": "hadoop.security.authorization",
+        "type": "bool",
+        "subType": "YesTrue:NoFalse",
+        "mandatory": true,
+        "defaultValue": "false",
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Authorization Enabled"
+      },
+      {
+        "itemId": 5,
+        "name": "hadoop.security.authentication",
+        "type": "enum",
+        "subType": "authnType",
+        "mandatory": true,
+        "defaultValue": "simple",
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Authentication Type"
+      },
+      {
+        "itemId": 6,
+        "name": "hadoop.security.auth_to_local",
+        "type": "string",
+        "subType": "",
+        "mandatory": false,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": ""
+      },
+      {
+        "itemId": 7,
+        "name": "dfs.datanode.kerberos.principal",
+        "type": "string",
+        "subType": "",
+        "mandatory": false,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": ""
+      },
+      {
+        "itemId": 8,
+        "name": "dfs.namenode.kerberos.principal",
+        "type": "string",
+        "subType": "",
+        "mandatory": false,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": ""
+      },
+      {
+        "itemId": 9,
+        "name": "dfs.secondary.namenode.kerberos.principal",
+        "type": "string",
+        "subType": "",
+        "mandatory": false,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": ""
+      },
+      {
+        "itemId": 10,
+        "name": "hadoop.rpc.protection",
+        "type": "enum",
+        "subType": "rpcProtection",
+        "mandatory": false,
+        "defaultValue": "authentication",
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "RPC Protection Type"
+      },
+      {
+        "itemId": 11,
+        "name": "commonNameForCertificate",
+        "type": "string",
+        "subType": "",
+        "mandatory": false,
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Common Name for Certificate"
+      }
+    ],
+    "resources": [
+      {
+        "itemId": 1,
+        "name": "path",
+        "type": "path",
+        "level": 10,
+        "mandatory": true,
+        "lookupSupported": true,
+        "recursiveSupported": true,
+        "excludesSupported": false,
+        "matcher": 
"org.apache.ranger.plugin.resourcematcher.RangerPathResourceMatcher",
+        "matcherOptions": {
+          "wildCard": "true",
+          "ignoreCase": "false"
+        },
+        "validationRegEx": "",
+        "validationMessage": "",
+        "uiHint": "",
+        "label": "Resource Path",
+        "description": "HDFS file or directory path"
+      }
+    ],
+    "accessTypes": [
+      {
+        "itemId": 1,
+        "name": "read",
+        "label": "Read",
+        "impliedGrants": []
+      },
+      {
+        "itemId": 2,
+        "name": "write",
+        "label": "Write",
+        "impliedGrants": []
+      },
+      {
+        "itemId": 3,
+        "name": "execute",
+        "label": "Execute",
+        "impliedGrants": []
+      }
+    ],
+    "policyConditions": [],
+    "contextEnrichers": [],
+    "enums": [
+      {
+        "itemId": 1,
+        "name": "authnType",
+        "elements": [
+          {
+            "itemId": 1,
+            "name": "simple",
+            "label": "Simple"
+          },
+          {
+            "itemId": 2,
+            "name": "kerberos",
+            "label": "Kerberos"
+          }
+        ],
+        "defaultIndex": 0
+      },
+      {
+        "itemId": 2,
+        "name": "rpcProtection",
+        "elements": [
+          {
+            "itemId": 1,
+            "name": "authentication",
+            "label": "Authentication"
+          },
+          {
+            "itemId": 2,
+            "name": "integrity",
+            "label": "Integrity"
+          },
+          {
+            "itemId": 3,
+            "name": "privacy",
+            "label": "Privacy"
+          }
+        ],
+        "defaultIndex": 0
+      }
+    ],
+    "dataMaskDef": {
+      "maskTypes": [],
+      "accessTypes": [],
+      "resources": []
+    },
+    "rowFilterDef": {
+      "accessTypes": [],
+      "resources": []
+    },
+    "id": 1,
+    "guid": "0d047247-bafe-4cf8-8e9b-d5d377284b2d",
+    "isEnabled": true,
+    "createTime": "20160314-14:39:34.000-+0000",
+    "updateTime": "20160314-14:39:34.000-+0000",
+    "version": 1
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/eb31b7e9/hdfs-agent/src/test/resources/ranger-hdfs-security.xml
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/test/resources/ranger-hdfs-security.xml b/hdfs-agent/src/test/resources/ranger-hdfs-security.xml
new file mode 100644
index 0000000..3062108
--- /dev/null
+++ b/hdfs-agent/src/test/resources/ranger-hdfs-security.xml
@@ -0,0 +1,45 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<configuration xmlns:xi="http://www.w3.org/2001/XInclude">
+
+       <property>
+               <name>ranger.plugin.hdfs.service.name</name>
+               <value>HDFSTest</value>
+               <description>
+                       Name of the Ranger service containing policies for this SampleApp instance
+               </description>
+       </property>
+
+       <property>
+        <name>ranger.plugin.hdfs.policy.source.impl</name>
+        <value>org.apache.ranger.services.hdfs.RangerAdminClientImpl</value>
+        <description>
+            Policy source.
+        </description>
+    </property>
+    
+       <property>
+               <name>ranger.plugin.hdfs.policy.cache.dir</name>
+               <value>${project.build.directory}</value>
+               <description>
+                       Directory where Ranger policies are cached after successful retrieval from the source
+               </description>
+       </property>
+
+</configuration>

