http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorBase.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorBase.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorBase.java
new file mode 100644
index 0000000..2dab171
--- /dev/null
+++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessorBase.java
@@ -0,0 +1,671 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ranger.authorization.hbase;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.NamespaceDescriptor;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Mutation;
+import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
+import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.MasterObserver;
+import org.apache.hadoop.hbase.coprocessor.ObserverContext;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
+import org.apache.hadoop.hbase.master.RegionPlan;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.CleanupBulkLoadRequest;
+import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.replication.ReplicationEndpoint;
+import org.apache.hadoop.hbase.security.access.Permission;
+import org.apache.hadoop.hbase.security.access.Permission.Action;
+
+public class RangerAuthorizationCoprocessorBase extends BaseRegionObserver
+               implements MasterObserver, RegionServerObserver, BulkLoadObserver {
+
+       @Override
+       public void preStopRegionServer(
+                       ObserverContext<RegionServerCoprocessorEnvironment> env)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preMerge(
+                       ObserverContext<RegionServerCoprocessorEnvironment> ctx,
+                       HRegion regionA, HRegion regionB) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postMerge(
+                       ObserverContext<RegionServerCoprocessorEnvironment> c,
+                       HRegion regionA, HRegion regionB, HRegion mergedRegion)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preMergeCommit(
+                       ObserverContext<RegionServerCoprocessorEnvironment> ctx,
+                       HRegion regionA, HRegion regionB, List<Mutation> metaEntries)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postMergeCommit(
+                       ObserverContext<RegionServerCoprocessorEnvironment> ctx,
+                       HRegion regionA, HRegion regionB, HRegion mergedRegion)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preRollBackMerge(
+                       ObserverContext<RegionServerCoprocessorEnvironment> ctx,
+                       HRegion regionA, HRegion regionB) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postRollBackMerge(
+                       ObserverContext<RegionServerCoprocessorEnvironment> ctx,
+                       HRegion regionA, HRegion regionB) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preCreateTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postCreateTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preCreateTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postCreateTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preModifyTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HTableDescriptor htd) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postModifyTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HTableDescriptor htd) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preModifyTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HTableDescriptor htd) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postModifyTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HTableDescriptor htd) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preAddColumn(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor column) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postAddColumn(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor column) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preAddColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor column) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postAddColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor column) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preModifyColumn(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor descriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postModifyColumn(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor descriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preModifyColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor descriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postModifyColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, HColumnDescriptor descriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteColumn(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, byte[] c) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteColumn(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, byte[] c) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, byte[] c) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteColumnHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName, byte[] c) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preEnableTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postEnableTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preEnableTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postEnableTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDisableTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDisableTable(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDisableTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDisableTableHandler(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       TableName tableName) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preMove(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo region, ServerName srcServer, ServerName destServer)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postMove(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo region, ServerName srcServer, ServerName destServer)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preAssign(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postAssign(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preUnassign(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo, boolean force) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postUnassign(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo, boolean force) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preRegionOffline(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postRegionOffline(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       HRegionInfo regionInfo) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preBalance(ObserverContext<MasterCoprocessorEnvironment> ctx)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postBalance(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       List<RegionPlan> plans) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public boolean preBalanceSwitch(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx, boolean newValue)
+                       throws IOException {
+               // TODO Auto-generated method stub
+               return false;
+       }
+
+       @Override
+       public void postBalanceSwitch(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       boolean oldValue, boolean newValue) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preShutdown(ObserverContext<MasterCoprocessorEnvironment> ctx)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preStopMaster(ObserverContext<MasterCoprocessorEnvironment> ctx)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postStartMaster(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preMasterInitialization(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postSnapshot(ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preCloneSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postCloneSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preRestoreSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postRestoreSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot, HTableDescriptor hTableDescriptor)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteSnapshot(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       SnapshotDescription snapshot) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preGetTableDescriptors(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       List<TableName> tableNamesList, List<HTableDescriptor> descriptors)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postGetTableDescriptors(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       List<HTableDescriptor> descriptors) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preCreateNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       NamespaceDescriptor ns) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postCreateNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       NamespaceDescriptor ns) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preDeleteNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postDeleteNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace)
+                       throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void preModifyNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       NamespaceDescriptor ns) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       @Override
+       public void postModifyNamespace(
+                       ObserverContext<MasterCoprocessorEnvironment> ctx,
+                       NamespaceDescriptor ns) throws IOException {
+               // TODO Auto-generated method stub
+
+       }
+
+       public void preRollWALWriterRequest(ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void postRollWALWriterRequest(ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+       
+       public void preTableFlush(final ObserverContext<MasterCoprocessorEnvironment> ctx, final TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void postTableFlush(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void preTruncateTableHandler(final ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void postTruncateTableHandler(final ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void preTruncateTable(final ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public void postTruncateTable(final ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException {
+                       // TODO Auto-generated method stub
+
+       }
+
+       public ReplicationEndpoint postCreateReplicationEndPoint(ObserverContext<RegionServerCoprocessorEnvironment> ctx, ReplicationEndpoint endpoint) {
+               return endpoint;
+       }
+
+       public void prePrepareBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx, PrepareBulkLoadRequest request) throws IOException {
+       }
+
+       public void preCleanupBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx, CleanupBulkLoadRequest request) throws IOException {
+       }
+}
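
Every hook in this base class is a deliberate no-op, so concrete Ranger coprocessors can extend it and override only the hooks they need to authorize. A minimal sketch of such a subclass follows (not part of this commit; requestUser() and isAccessAllowed() are hypothetical stand-ins for Ranger's real user-resolution and policy-evaluation plumbing):

    public class SampleRangerCoprocessor extends RangerAuthorizationCoprocessorBase {
        @Override
        public void preDeleteTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
                TableName tableName) throws IOException {
            // Throwing from a pre-hook makes HBase abort the client request.
            String user = requestUser(ctx);                        // hypothetical helper
            if (!isAccessAllowed(user, tableName, Action.ADMIN)) { // hypothetical helper
                throw new org.apache.hadoop.hbase.security.AccessDeniedException(
                        "Insufficient permissions for user '" + user + "' on table " + tableName);
            }
        }
    }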

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/RangerAccessControlListsTest.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/RangerAccessControlListsTest.java b/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/RangerAccessControlListsTest.java
new file mode 100644
index 0000000..aa66d08
--- /dev/null
+++ b/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/RangerAccessControlListsTest.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hbase.security.access;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hbase.master.MasterServices;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class RangerAccessControlListsTest {
+
+       @BeforeClass
+       public static void setUpBeforeClass() throws Exception {
+       }
+
+       @AfterClass
+       public static void tearDownAfterClass() throws Exception {
+       }
+
+       @Before
+       public void setUp() throws Exception {
+       }
+
+       @After
+       public void tearDown() throws Exception {
+       }
+
+       @Test
+       public void testInit() {
+               IOException exceptionFound = null ;
+               try {
+                       MasterServices service = null ;
+                       RangerAccessControlLists.init(service) ;
+               } catch (IOException e) {
+                       exceptionFound = e ;
+               }
+               Assert.assertFalse("Expected to get a NullPointerExecution after init method Execution - Found [" + exceptionFound + "]",  (!(exceptionFound != null && exceptionFound.getCause() instanceof NullPointerException))) ;
+       }
+
+}
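
The double-negated assertFalse above is equivalent to asserting that init(null) threw an IOException whose cause is a NullPointerException. A clearer equivalent assertion, shown only as a sketch (not part of the commit):

    Assert.assertTrue("Expected init(null) to throw an IOException caused by a NullPointerException - Found [" + exceptionFound + "]",
            exceptionFound != null && exceptionFound.getCause() instanceof NullPointerException);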

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/XaAccessControlListsTest.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/XaAccessControlListsTest.java b/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/XaAccessControlListsTest.java
deleted file mode 100644
index 0b8afbb..0000000
--- a/hbase-agent/src/test/java/org/apache/hadoop/hbase/security/access/XaAccessControlListsTest.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hbase.security.access;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.master.MasterServices;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class XaAccessControlListsTest {
-
-       @BeforeClass
-       public static void setUpBeforeClass() throws Exception {
-       }
-
-       @AfterClass
-       public static void tearDownAfterClass() throws Exception {
-       }
-
-       @Before
-       public void setUp() throws Exception {
-       }
-
-       @After
-       public void tearDown() throws Exception {
-       }
-
-       @Test
-       public void testInit() {
-               IOException exceptionFound = null ;
-               try {
-                       MasterServices service = null ;
-                       XaAccessControlLists.init(service) ;
-               } catch (IOException e) {
-                       exceptionFound = e ;
-               }
-               Assert.assertFalse("Expected to get a NullPointerExecution after init method Execution - Found [" + exceptionFound + "]",  (!(exceptionFound != null && exceptionFound.getCause() instanceof NullPointerException))) ;
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/conf/xasecure-hdfs-security-changes.cfg
----------------------------------------------------------------------
diff --git a/hdfs-agent/conf/xasecure-hdfs-security-changes.cfg b/hdfs-agent/conf/xasecure-hdfs-security-changes.cfg
index 76f4919..97e631a5 100644
--- a/hdfs-agent/conf/xasecure-hdfs-security-changes.cfg
+++ b/hdfs-agent/conf/xasecure-hdfs-security-changes.cfg
@@ -16,7 +16,7 @@
 # Change the original policy parameter to work with policy manager based.
 # 
 #
-hdfs.authorization.verifier.classname                         com.xasecure.pdp.hdfs.XASecureAuthorizer                                       mod     create-if-not-exists
+hdfs.authorization.verifier.classname                         org.apache.ranger.pdp.hdfs.RangerAuthorizer                                    mod     create-if-not-exists
 xasecure.hdfs.policymgr.url                                   %POLICY_MGR_URL%/service/assets/policyList/%REPOSITORY_NAME%                   mod     create-if-not-exists
 xasecure.hdfs.policymgr.url.saveAsFile                        /tmp/hadoop_%REPOSITORY_NAME%_json                                              mod     create-if-not-exists
 xasecure.hdfs.policymgr.url.laststoredfile                    %POLICY_CACHE_FILE_PATH%/hadoop_%REPOSITORY_NAME%_json                          mod     create-if-not-exists

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/conf/xasecure-hdfs-security.xml
----------------------------------------------------------------------
diff --git a/hdfs-agent/conf/xasecure-hdfs-security.xml b/hdfs-agent/conf/xasecure-hdfs-security.xml
index 81f8f6a..9cf5b69 100644
--- a/hdfs-agent/conf/xasecure-hdfs-security.xml
+++ b/hdfs-agent/conf/xasecure-hdfs-security.xml
@@ -21,7 +21,7 @@
        <!--  The following property is used to select appropriate XASecure Authorizer Module (filebased, policymanager based) -->
        <property>
                <name>hdfs.authorization.verifier.classname</name>
-               <value>com.xasecure.pdp.hdfs.XASecureAuthorizer</value>
+               <value>org.apache.ranger.pdp.hdfs.RangerAuthorizer</value>
                <description>
                        Class Name of the authorization Module 
                </description>

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/scripts/install.sh
----------------------------------------------------------------------
diff --git a/hdfs-agent/scripts/install.sh b/hdfs-agent/scripts/install.sh
index cc233da..ba72c4d 100644
--- a/hdfs-agent/scripts/install.sh
+++ b/hdfs-agent/scripts/install.sh
@@ -315,7 +315,7 @@ do
                 if [ $? -eq 0 ]
                 then
                        cp="${install_dir}/installer/lib/*:${hdp_dir}/*:${hdp_lib_dir}/*"
-                        java -cp "${cp}" com.xasecure.utils.install.XmlConfigChanger -i ${archivefn} -o ${newfn} -c ${f} ${PROP_ARGS}
+                        java -cp "${cp}" org.apache.ranger.utils.install.XmlConfigChanger -i ${archivefn} -o ${newfn} -c ${f} ${PROP_ARGS}
                         if [ $? -eq 0 ]
                         then
                                 diff -w ${newfn} ${fullpathorgfn} > /dev/null 2>&1 

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifier.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifier.java b/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifier.java
deleted file mode 100644
index 358e765..0000000
--- a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifier.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.xasecure.authorization.hadoop;
-
-import java.util.Set;
-
-public interface HDFSAccessVerifier {
-       public class AccessContext {
-               String agentId;
-               int repositoryType;
-               String sessionId;
-               String clientType;
-               String clientIP;
-               String requestData;
-       }
-       
-       public boolean isAccessGranted(String aPathName, String aPathOwnerName, String access, String username, Set<String> groups);
-       public boolean isAuditLogEnabled(String aPathName) ;
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifierFactory.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifierFactory.java b/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifierFactory.java
deleted file mode 100644
index 90dc085..0000000
--- a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/HDFSAccessVerifierFactory.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.xasecure.authorization.hadoop;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-
-import com.xasecure.authorization.hadoop.config.XaSecureConfiguration;
-import com.xasecure.authorization.hadoop.constants.XaSecureHadoopConstants;
-
-public class HDFSAccessVerifierFactory {
-       
-       private static final Log LOG = LogFactory.getLog(HDFSAccessVerifierFactory.class) ;
-
-       private static HDFSAccessVerifier hdfsAccessVerifier = null ;
-       
-       public static HDFSAccessVerifier getInstance() {
-               if (hdfsAccessVerifier == null) {
-                       synchronized(HDFSAccessVerifierFactory.class) {
-                               HDFSAccessVerifier temp = hdfsAccessVerifier ;
-                               if (temp == null) {
-                                       
-                                       String hdfsAccessVerifierClassName = XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_PROP, XaSecureHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE) ;
-                                       if (hdfsAccessVerifierClassName != null) {
-                                               try {
-                                                       hdfsAccessVerifierClassName = hdfsAccessVerifierClassName.trim();
-                                                       hdfsAccessVerifier = (HDFSAccessVerifier) (Class.forName(hdfsAccessVerifierClassName).newInstance()) ;
-                                                       LOG.info("Created a new instance of class: [" + hdfsAccessVerifierClassName + "] for HDFS Access verification.");
-                                               } catch (InstantiationException e) {
-                                                       LOG.error("Unable to create HdfsAccessVerifier Verifier: [" + hdfsAccessVerifierClassName + "]", e);
-                                               } catch (IllegalAccessException e) {
-                                                       LOG.error("Unable to create HdfsAccessVerifier Verifier: [" + hdfsAccessVerifierClassName + "]", e);
-                                               } catch (ClassNotFoundException e) {
-                                                       LOG.error("Unable to create HdfsAccessVerifier Verifier: [" + hdfsAccessVerifierClassName + "]", e);
-                                               }
-                                       }
-                               }
-                       }
-               }
-               return hdfsAccessVerifier ;
-               
-       }
-}
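
The deleted factory performs double-checked locking on a non-volatile field, which is not a safe publication pattern under the Java memory model. For reference, a lazy-holder sketch achieving the same single, lazy construction safely (VerifierHolder and createVerifier() are hypothetical names; the configuration constants come from the deleted code above):

    public final class VerifierHolder {
        private static class Holder {
            // The JVM guarantees this initializer runs exactly once,
            // thread-safely, on first access of Holder.
            static final HDFSAccessVerifier INSTANCE = createVerifier();
        }

        public static HDFSAccessVerifier getInstance() {
            return Holder.INSTANCE;
        }

        private static HDFSAccessVerifier createVerifier() {
            String cls = XaSecureConfiguration.getInstance().get(
                    XaSecureHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_PROP,
                    XaSecureHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE).trim();
            try {
                return (HDFSAccessVerifier) Class.forName(cls).newInstance();
            } catch (Exception e) {
                throw new IllegalStateException("Unable to create verifier [" + cls + "]", e);
            }
        }
    }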

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java b/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
deleted file mode 100644
index bdb2530..0000000
--- a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.xasecure.authorization.hadoop.agent;
-
-import java.lang.instrument.Instrumentation;
-
-public class AuthCodeInjectionJavaAgent {
-       public static final String AUTHORIZATION_AGENT_PARAM = "authagent";
-
-       public static void premain(String agentArgs, Instrumentation inst) {
-               if (agentArgs != null && AUTHORIZATION_AGENT_PARAM.equalsIgnoreCase(agentArgs.trim())) {
-                       inst.addTransformer(new HadoopAuthClassTransformer());
-               }
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/HadoopAuthClassTransformer.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/HadoopAuthClassTransformer.java b/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/HadoopAuthClassTransformer.java
deleted file mode 100644
index 9a8caf2..0000000
--- a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/agent/HadoopAuthClassTransformer.java
+++ /dev/null
@@ -1,196 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.xasecure.authorization.hadoop.agent;
-
-import java.io.IOException;
-import java.lang.instrument.ClassFileTransformer;
-import java.lang.instrument.IllegalClassFormatException;
-import java.security.ProtectionDomain;
-
-import javassist.CannotCompileException;
-import javassist.ClassPool;
-import javassist.CtClass;
-import javassist.CtMethod;
-import javassist.NotFoundException;
-
-public class HadoopAuthClassTransformer implements ClassFileTransformer {
-
-       byte[] transformedClassByteCode = null ;
-       
-       @Override
-       public byte[] transform(ClassLoader aClassLoader, String aClassName, Class<?> aClassBeingRedefined, ProtectionDomain aProtectionDomain, byte[] aClassFileBuffer) throws IllegalClassFormatException {
-
-               byte[] byteCode = aClassFileBuffer;
-               if (aClassName.equals("org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker")) {
-                       System.out.println("Injection code is Invoked in JVM [" + Runtime.getRuntime() + "] for class [" + aClassBeingRedefined + "] ....");
-                       try {
-                               if (transformedClassByteCode == null) {
-                                       ClassPool cp = ClassPool.getDefault();
-                                       String curClassName = aClassName.replaceAll("/", ".");
-                                       CtClass curClass = cp.get(curClassName);
-                                       
-                                       
-                                       CtClass inodeClass = null, snapShotClass = null, fsActionClass = null  ;
-                                       String paramClassName = null ;
-                                       
-                                       try {
-                                               paramClassName = "org.apache.hadoop.hdfs.server.namenode.INode" ;
-                                               inodeClass = cp.get(paramClassName) ;
-                                       } catch (javassist.NotFoundException nfe) {
-                                               System.err.println("Unable to find Class for [" + paramClassName + "]" + nfe) ;
-                                               inodeClass = null ;
-                                       }
-
-
-                                       try {
-                                               paramClassName = "org.apache.hadoop.fs.permission.FsAction" ;
-                                               fsActionClass = cp.get(paramClassName) ;
-                                       } catch (javassist.NotFoundException nfe) {
-                                               System.err.println("Unable to find Class for [" + paramClassName + "]" + nfe) ;
-                                               fsActionClass = null ;
-                                       }
-                                       
-                                       try {
-                                               paramClassName = "org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot" ;
-                                               snapShotClass = cp.get(paramClassName) ;
-                                       } catch (javassist.NotFoundException nfe) {
-                                               System.err.println("Unable to find Class for [" + paramClassName + "]" + nfe) ;
-                                               snapShotClass = null ;
-                                       }
-                                       
-                                       boolean injected = false ;
-                                       boolean injected_cm = false ;
-                                       boolean withIntParamInMiddle = false ;
-
-                                       
-                                       try {
-                                               
-                                               CtClass[] paramArgs = null ;
-                                               
-                                               if (inodeClass != null && fsActionClass != null) {
-
-                                                       CtMethod checkMethod = null ;
-                                                       
-                                                       if (snapShotClass != null) {
-                                                               paramArgs = new CtClass[] { inodeClass, snapShotClass, fsActionClass } ;
-                                                               try {
-                                                                       checkMethod = curClass.getDeclaredMethod("check", paramArgs);
-                                                               }
-                                                               catch(NotFoundException SSnfe) {
-                                                                       System.out.println("Unable to find check method with snapshot class. Trying to find check method without snapshot support.") ;
-                                                                       snapShotClass = null;
-                                                                       paramArgs = new CtClass[] { inodeClass, CtClass.intType,  fsActionClass } ;
-                                                                       checkMethod = curClass.getDeclaredMethod("check", paramArgs);
-                                                                       withIntParamInMiddle = true ;
-                                                                       System.out.println("Found method check() - without snapshot support") ;
-                                                               }
-                                                       }
-                                                       else {
-                                                               System.out.println("Snapshot class was already null ... Trying to find check method") ;
-                                                               paramArgs = new CtClass[] { inodeClass, fsActionClass } ;
-                                                               checkMethod = curClass.getDeclaredMethod("check", paramArgs);
-                                                               System.out.println("Found method check() - without snapshot support") ;
-                                                       }
-                                               
-                                                       if (checkMethod != null) {
-                                                               if (snapShotClass == null && (!withIntParamInMiddle)) {
-                                                                       checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.logHadoopEvent(ugi,$1,$2,true) ;");
-                                                                       CtClass throwable = ClassPool.getDefault().get("java.lang.Throwable");
-                                                                       checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.logHadoopEvent(ugi,$1,$2,false) ; throw $e; }", throwable);
-                                                                       checkMethod.insertBefore("{ if ( org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.check(ugi,$1,$2) ) { return ; } }");
-                                                               }
-                                                               else {
-                                                                       checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.logHadoopEvent(ugi,$1,$3,true) ;");
-                                                                       CtClass throwable = ClassPool.getDefault().get("java.lang.Throwable");
-                                                                       checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.logHadoopEvent(ugi,$1,$3,false) ; throw $e; }", throwable);
-                                                                       checkMethod.insertBefore("{ if ( org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.check(ugi,$1,$3) ) { return ; } }");
-                                                               }
-                                                               System.out.println("Injection of code is successfull ....");
-                                                       }
-                                                       else {
-                                                               System.out.println("Injection failed. Unable to identify check() method on class: [" + curClass.getName() + "]. Continue without Injection ...") ;
-                                                       }
-                                                       
-                                                       injected = true ;
-                                               }
-                                       } catch (NotFoundException nfex) {
-                                               nfex.printStackTrace();
-                                               System.out.println("Unable to find the check() method with expected params in [" + aClassName + "] ....");
-                                               for (CtMethod m : curClass.getDeclaredMethods()) {
-                                                       System.err.println("Found Method: " + m);
-                                               }
-                                       }
-                                       
-                                       
-                                       try {
-                                               
-                                               CtMethod checkMethod = curClass.getDeclaredMethod("checkPermission");
-                                               
-                                               if (checkMethod != null) {
-                                                       checkMethod.insertBefore("org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.checkPermissionPre($1) ;");
-                                                       checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.writeLog($1) ;");
-                                                       CtClass throwable = ClassPool.getDefault().get("org.apache.hadoop.security.AccessControlException");
-                                                       checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.XaSecureFSPermissionChecker.writeLog($1); throw $e; }", throwable);
-                                                       injected_cm = true ;
-                                               }
-
-                                       } catch (NotFoundException nfe) {
-                                               nfe.printStackTrace();
-                                               System.out.println("Unable to find the checkPermission() method with expected params in [" + aClassName + "] ....");
-                                               for (CtMethod m : curClass.getDeclaredMethods()) {
-                                                       System.err.println("Found Method: " + m);
-                                               }
-                                       }
-                                       
-                                       System.out.println("Injected: " + injected + ", Injected_CheckMethod: " + injected_cm ) ;
-                                       
-                                       if (injected) {
-                                               byteCode = curClass.toBytecode();
-                                               if (transformedClassByteCode == null) {
-                                                       synchronized(HadoopAuthClassTransformer.class) {
-                                                               byte[] temp = transformedClassByteCode ;
-                                                               if (temp == null) {
-                                                                       transformedClassByteCode = byteCode;
-                                                               }
-                                                       }
-                                               }
-                                       }
-                                       
-                               }
-                               else {
-                                       byteCode = transformedClassByteCode;
-                                       System.out.println("Injection of code (using existing bytecode) is successfull ....");
-                               }
-                       } catch (NotFoundException e) {
-                               System.err.println("Class Not Found Exception for class Name: " + aClassName + " Exception: " + e);
-                               e.printStackTrace();
-                       } catch (CannotCompileException e) {
-                               System.err.println("Can not compile Exception for class Name: " + aClassName + " Exception: " + e);
-                               e.printStackTrace();
-                       } catch (IOException e) {
-                               System.err.println("IO Exception for class Name: " + aClassName + " Exception: " + e);
-                               e.printStackTrace();
-                       }
-               
-               }
-               
-               return byteCode;
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/exceptions/XaSecureAccessControlException.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/exceptions/XaSecureAccessControlException.java
 
b/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/exceptions/XaSecureAccessControlException.java
deleted file mode 100644
index 90e4600..0000000
--- 
a/hdfs-agent/src/main/java/com/xasecure/authorization/hadoop/exceptions/XaSecureAccessControlException.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.xasecure.authorization.hadoop.exceptions;
-
-import org.apache.hadoop.security.AccessControlException;
-
-
-public class XaSecureAccessControlException extends AccessControlException {
-
-       private static final long serialVersionUID = -4673975720243484927L;
-
-       public XaSecureAccessControlException(String aMsg) {
-               super(aMsg) ;
-       }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
 
b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
new file mode 100644
index 0000000..f558013
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
@@ -0,0 +1,317 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import static 
org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants.*;
+
+import java.net.InetAddress;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.hadoop.ipc.Server;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.ranger.audit.model.EnumRepositoryType;
+import org.apache.ranger.audit.model.HdfsAuditEvent;
+import org.apache.ranger.audit.provider.AuditProviderFactory;
+import org.apache.ranger.authorization.hadoop.HDFSAccessVerifier;
+import org.apache.ranger.authorization.hadoop.HDFSAccessVerifierFactory;
+import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
+import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
+import 
org.apache.ranger.authorization.hadoop.exceptions.RangerAccessControlException;
+
+
+public class RangerFSPermissionChecker {
+
+       private static Map<FsAction, String[]> access2ActionListMapper = null ;
+
+       private static volatile HDFSAccessVerifier authorizer = null ; // volatile: published via double-checked locking in AuthorizeAccessForUser()
+       
+       private static final String RangerModuleName    = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP
 , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ;
+       private static final String HadoopModuleName            = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HADOOP_MODULE_ACL_NAME_PROP
 , RangerHadoopConstants.DEFAULT_HADOOP_MODULE_ACL_NAME) ;
+       private static final boolean addHadoopAuth                      = 
RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_PROP,
 RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_DEFAULT) ;
+       private static final String excludeUserList             = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HDFS_EXCLUDE_LIST_PROP,
 RangerHadoopConstants.AUDITLOG_EMPTY_STRING) ;
+       private static final String repositoryName          = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_REPOSITORY_NAME_PROP);
+       private static final boolean isAuditEnabled         = 
RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.AUDITLOG_IS_ENABLED_PROP,
 true);
+
+       private static final Log LOG = 
LogFactory.getLog(RangerFSPermissionChecker.class);
+
+       private static HashSet<String> excludeUsers = null ;
+       
+       private static ThreadLocal<LogEventInfo> currentValidatedLogEvent = new 
ThreadLocal<LogEventInfo>() ;
+       
+
+       static {
+               access2ActionListMapper = new HashMap<FsAction, String[]>();
+               access2ActionListMapper.put(FsAction.NONE, new String[] {});
+               access2ActionListMapper.put(FsAction.ALL, new String[] { 
READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.READ, new String[] { 
READ_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.READ_WRITE, new String[] { 
READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.READ_EXECUTE, new String[] 
{ READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.WRITE, new String[] { 
WRITE_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.WRITE_EXECUTE, new 
String[] { WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
+               access2ActionListMapper.put(FsAction.EXECUTE, new String[] { 
EXECUTE_ACCCESS_TYPE });
+               
+               if (excludeUserList != null && excludeUserList.trim().length() 
> 0) {
+                       excludeUsers = new HashSet<String>() ;
+                       for(String excludeUser : 
excludeUserList.trim().split(",")) {
+                               excludeUser = excludeUser.trim() ;
+                               if (LOG.isDebugEnabled()) {
+                                       LOG.debug("Adding exclude user [" + 
excludeUser + "]");
+                               }
+                               excludeUsers.add(excludeUser) ;
+                       }
+               }
+
+               
RangerConfiguration.getInstance().initAudit(AuditProviderFactory.ApplicationType.Hdfs);
         
+       }
+
+       public static boolean check(UserGroupInformation ugi, INode inode, 
FsAction access) throws RangerAccessControlException {
+
+               if (inode == null) {
+                       return false;
+               }
+
+               String user = ugi.getShortUserName();
+
+               Set<String> groups = Collections.unmodifiableSet(new 
HashSet<String>(Arrays.asList(ugi.getGroupNames())));
+               
+               String pathOwnerName = inode.getUserName() ;
+
+               boolean accessGranted =  
AuthorizeAccessForUser(inode.getFullPathName(), pathOwnerName, access, user, 
groups);
+               
+               if (!accessGranted &&  !addHadoopAuth ) {
+                       String inodeInfo = (inode.isDirectory() ? "directory" : 
"file") +  "="  + "\"" + inode.getFullPathName() + "\""  ;
+                   throw new RangerAccessControlException("Permission denied: principal{user=" + user + ", groups=" + groups + "}, access=" + access + ", " + inodeInfo) ;
+               }
+               
+               return accessGranted ;
+
+       }
+
+       public static boolean AuthorizeAccessForUser(String aPathName, String 
aPathOwnerName, FsAction access, String user, Set<String> groups) throws 
RangerAccessControlException {
+               boolean accessGranted = false;
+               try {
+                       if 
(RangerHadoopConstants.HDFS_ROOT_FOLDER_PATH_ALT.equals(aPathName)) {
+                               aPathName = 
RangerHadoopConstants.HDFS_ROOT_FOLDER_PATH;
+                       }
+                       
+                       String[] accessTypes = 
access2ActionListMapper.get(access);
+
+                       if ((accessTypes == null) || (accessTypes.length == 0)) 
{
+                               accessGranted = false;
+                       } else {
+                               
+                               if (authorizer == null) {
+                                       
synchronized(RangerFSPermissionChecker.class) {
+                                               HDFSAccessVerifier temp = 
authorizer ;
+                                               if (temp == null) {
+                                                       try {
+                                                               authorizer = 
HDFSAccessVerifierFactory.getInstance();
+                                                       }
+                                                       catch(Throwable t) {
+                                                               
LOG.error("Unable to create Authorizer", t);
+                                                       }
+                                               }
+                                       }
+                               }
+                               
+                               if (authorizer != null) {
+                                       for (String accessType : accessTypes) {
+                                               accessGranted = 
authorizer.isAccessGranted(aPathName, aPathOwnerName, accessType, user, groups);
+                                               if (!accessGranted) {
+                                                       break;
+                                               }
+                                       }
+                               }
+                       }
+
+               } finally {
+                       logEvent(RangerModuleName, user, aPathName, access, 
accessGranted);
+               }
+               return accessGranted;
+       }
+       
+       
+       public static void logHadoopEvent(UserGroupInformation ugi, INode 
inode, FsAction access, boolean accessGranted) {
+               String path = (inode == null) ? 
RangerHadoopConstants.AUDITLOG_EMPTY_STRING : inode.getFullPathName() ;
+               String username = (ugi == null) ? 
RangerHadoopConstants.AUDITLOG_EMPTY_STRING : ugi.getShortUserName() ;
+               logEvent(HadoopModuleName, username, path,  access, 
accessGranted);
+       }
+       
+       
+
+       
+       
+       private static void logEvent(String moduleName,  String username, 
String path, FsAction access, boolean accessGranted) {
+               LogEventInfo e = null;
+
+               if(isAuditEnabled) {
+                   e = new LogEventInfo(moduleName,  username, path, access, 
accessGranted) ;
+               }
+
+               currentValidatedLogEvent.set(e);
+       }
+       
+       
+       public static void checkPermissionPre(String pathToBeValidated) {
+               // TODO: save the path in a thread-local
+       }
+       
+       public static void checkPermissionPost(String pathToBeValidated) {
+               writeLog(pathToBeValidated);
+       }
+
+       public static void writeLog(String pathValidated) {
+               
+               LogEventInfo e = currentValidatedLogEvent.get();
+               
+               if (e == null) {
+                       return ;
+               }
+               
+               String username = e.getUserName() ;
+               
+               boolean skipLog = (username != null && excludeUsers != null && 
excludeUsers.contains(username)) ;
+               
+               if (skipLog) {
+                       return ;
+               }
+
+               String requestedPath = e.getPath() ;
+               
+               if (requestedPath == null) {
+                       requestedPath = 
RangerHadoopConstants.AUDITLOG_EMPTY_STRING ;
+               }
+
+               if (authorizer == null || !authorizer.isAuditLogEnabled(requestedPath)) { // guard: authorizer may still be null if the verifier factory failed
+                       return ;
+               }
+               
+               
+               String accessType = ( (e.getAccess() == null) ? 
RangerHadoopConstants.AUDITLOG_EMPTY_STRING : e.getAccess().toString() ) ;
+               
+               HdfsAuditEvent auditEvent = new HdfsAuditEvent();
+
+               auditEvent.setUser(username);
+               auditEvent.setResourcePath(requestedPath);
+               auditEvent.setResourceType("HDFSPath") ;
+               auditEvent.setAccessType(accessType);
+               auditEvent.setAccessResult((short)(e.isAccessGranted() ? 1 : 
0));
+               auditEvent.setClientIP(getRemoteIp());
+               auditEvent.setEventTime(getUTCDate());
+               auditEvent.setAclEnforcer(e.getModuleName());
+               auditEvent.setRepositoryType(EnumRepositoryType.HDFS);
+               auditEvent.setRepositoryName(repositoryName);
+               auditEvent.setResultReason(pathValidated);
+
+               /*
+                * Review following audit fields for appropriate values
+                *
+               auditEvent.setAgentId();
+               auditEvent.setPolicyId();
+               auditEvent.setSessionId();
+               auditEvent.setClientType();
+                *
+                */
+
+               try {
+                       if (LOG.isDebugEnabled()) {
+                               LOG.debug("Audit log of auditEvent: [" + 
auditEvent.toString() + "] - START.");
+                       }
+                       AuditProviderFactory.getAuditProvider().log(auditEvent);
+                       if (LOG.isDebugEnabled()) {
+                               LOG.debug("Audit log of auditEvent: [" + 
auditEvent.toString() + "] - END.");
+                       }
+               }
+               catch(Throwable t) {
+                       LOG.error("ERROR during audit log of auditEvent: [" + 
auditEvent.toString() + "]", t);
+               }
+       }
+       
+       
+       private static String getRemoteIp() {
+               String ret = null ;
+               InetAddress ip = Server.getRemoteIp() ;
+               if (ip != null) {
+                       ret = ip.getHostAddress();
+               }
+               return ret ;
+       }
+       
+       
+       public static Date getUTCDate() {
+               Calendar local=Calendar.getInstance();
+           int offset = local.getTimeZone().getOffset(local.getTimeInMillis());
+           GregorianCalendar utc = new 
GregorianCalendar(TimeZone.getTimeZone("GMT+0"));
+           utc.setTimeInMillis(local.getTimeInMillis());
+           utc.add(Calendar.MILLISECOND, -offset);
+           return utc.getTime();
+       }
+
+}
+
+class LogEventInfo {
+       String moduleName ;
+       String userName ;
+       String path ;
+       FsAction access ;
+       boolean accessGranted ;
+       
+       LogEventInfo(String moduleName,  String username, String path, FsAction 
access, boolean accessGranted) {
+               this.moduleName = moduleName ;
+               this.userName = username ;
+               this.path = path ;
+               this.access = access ;
+               this.accessGranted = accessGranted;
+       }
+
+       public String getModuleName() {
+               return moduleName;
+       }
+
+       public String getUserName() {
+               return userName;
+       }
+
+       public String getPath() {
+               return path;
+       }
+
+       public FsAction getAccess() {
+               return access;
+       }
+
+       public boolean isAccessGranted() {
+               return accessGranted;
+       }
+       
+       
+       
+}

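The new checker exposes two public entry points: the INode-based check(), which the bytecode transformer later in this commit injects into FSPermissionChecker, and the path-based AuthorizeAccessForUser(). A minimal sketch of driving the latter directly, assuming a Ranger-configured classpath; the user, group, path, and owner values are illustrative:

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker;

    // Hypothetical driver: checks READ access for user "alice" in group "analysts".
    public class RangerCheckerSketch {
        public static void main(String[] args) throws Exception {
            Set<String> groups = new HashSet<String>(Arrays.asList("analysts"));
            boolean granted = RangerFSPermissionChecker.AuthorizeAccessForUser(
                    "/apps/data", "hdfs", FsAction.READ, "alice", groups);
            // The call also queues an audit event in a thread-local; the injected
            // writeLog() hook flushes it after checkPermission() completes.
            System.out.println("granted=" + granted);
        }
    }
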
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/XaSecureFSPermissionChecker.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/XaSecureFSPermissionChecker.java
 
b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/XaSecureFSPermissionChecker.java
deleted file mode 100644
index ba61d1d..0000000
--- 
a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/XaSecureFSPermissionChecker.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * 
- * http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hadoop.hdfs.server.namenode;
-
-import static 
com.xasecure.authorization.hadoop.constants.XaSecureHadoopConstants.*;
-
-import java.net.InetAddress;
-import java.util.Arrays;
-import java.util.Calendar;
-import java.util.Collections;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.ipc.Server;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import com.xasecure.audit.model.EnumRepositoryType;
-import com.xasecure.audit.model.HdfsAuditEvent;
-import com.xasecure.audit.provider.AuditProviderFactory;
-import com.xasecure.authorization.hadoop.HDFSAccessVerifier;
-import com.xasecure.authorization.hadoop.HDFSAccessVerifierFactory;
-import com.xasecure.authorization.hadoop.config.XaSecureConfiguration;
-import com.xasecure.authorization.hadoop.constants.XaSecureHadoopConstants;
-import 
com.xasecure.authorization.hadoop.exceptions.XaSecureAccessControlException;
-
-
-public class XaSecureFSPermissionChecker {
-
-       private static Map<FsAction, String[]> access2ActionListMapper = null ;
-
-       private static HDFSAccessVerifier authorizer = null ;
-       
-       private static final String XaSecureModuleName          = 
XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.AUDITLOG_XASECURE_MODULE_ACL_NAME_PROP
 , XaSecureHadoopConstants.DEFAULT_XASECURE_MODULE_ACL_NAME) ;
-       private static final String HadoopModuleName            = 
XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.AUDITLOG_HADOOP_MODULE_ACL_NAME_PROP
 , XaSecureHadoopConstants.DEFAULT_HADOOP_MODULE_ACL_NAME) ;
-       private static final boolean addHadoopAuth                      = 
XaSecureConfiguration.getInstance().getBoolean(XaSecureHadoopConstants.XASECURE_ADD_HDFS_PERMISSION_PROP,
 XaSecureHadoopConstants.XASECURE_ADD_HDFS_PERMISSION_DEFAULT) ;
-       private static final String excludeUserList             = 
XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.AUDITLOG_HDFS_EXCLUDE_LIST_PROP,
 XaSecureHadoopConstants.AUDITLOG_EMPTY_STRING) ;
-       private static final String repositoryName          = 
XaSecureConfiguration.getInstance().get(XaSecureHadoopConstants.AUDITLOG_REPOSITORY_NAME_PROP);
-       private static final boolean isAuditEnabled         = 
XaSecureConfiguration.getInstance().getBoolean(XaSecureHadoopConstants.AUDITLOG_IS_ENABLED_PROP,
 true);
-
-       private static final Log LOG = 
LogFactory.getLog(XaSecureFSPermissionChecker.class);
-
-       private static HashSet<String> excludeUsers = null ;
-       
-       private static ThreadLocal<LogEventInfo> currentValidatedLogEvent = new 
ThreadLocal<LogEventInfo>() ;
-       
-
-       static {
-               access2ActionListMapper = new HashMap<FsAction, String[]>();
-               access2ActionListMapper.put(FsAction.NONE, new String[] {});
-               access2ActionListMapper.put(FsAction.ALL, new String[] { 
READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.READ, new String[] { 
READ_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.READ_WRITE, new String[] { 
READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.READ_EXECUTE, new String[] 
{ READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.WRITE, new String[] { 
WRITE_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.WRITE_EXECUTE, new 
String[] { WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-               access2ActionListMapper.put(FsAction.EXECUTE, new String[] { 
EXECUTE_ACCCESS_TYPE });
-               
-               if (excludeUserList != null && excludeUserList.trim().length() 
> 0) {
-                       excludeUsers = new HashSet<String>() ;
-                       for(String excludeUser : 
excludeUserList.trim().split(",")) {
-                               excludeUser = excludeUser.trim() ;
-                               if (LOG.isDebugEnabled()) {
-                                       LOG.debug("Adding exclude user [" + 
excludeUser + "]");
-                               }
-                               excludeUsers.add(excludeUser) ;
-                       }
-               }
-
-               
XaSecureConfiguration.getInstance().initAudit(AuditProviderFactory.ApplicationType.Hdfs);
               
-       }
-
-       public static boolean check(UserGroupInformation ugi, INode inode, 
FsAction access) throws XaSecureAccessControlException {
-
-               if (inode == null) {
-                       return false;
-               }
-
-               String user = ugi.getShortUserName();
-
-               Set<String> groups = Collections.unmodifiableSet(new 
HashSet<String>(Arrays.asList(ugi.getGroupNames())));
-               
-               String pathOwnerName = inode.getUserName() ;
-
-               boolean accessGranted =  
AuthorizeAccessForUser(inode.getFullPathName(), pathOwnerName, access, user, 
groups);
-               
-               if (!accessGranted &&  !addHadoopAuth ) {
-                       String inodeInfo = (inode.isDirectory() ? "directory" : 
"file") +  "="  + "\"" + inode.getFullPathName() + "\""  ;
-                   throw new XaSecureAccessControlException("Permission 
denied: principal{user=" + user + ",groups: " + groups + "}, access=" + access 
+ ", " + inodeInfo ) ; 
-               }
-               
-               return accessGranted ;
-
-       }
-
-       public static boolean AuthorizeAccessForUser(String aPathName, String 
aPathOwnerName, FsAction access, String user, Set<String> groups) throws 
XaSecureAccessControlException {
-               boolean accessGranted = false;
-               try {
-                       if 
(XaSecureHadoopConstants.HDFS_ROOT_FOLDER_PATH_ALT.equals(aPathName)) {
-                               aPathName = 
XaSecureHadoopConstants.HDFS_ROOT_FOLDER_PATH;
-                       }
-                       
-                       String[] accessTypes = 
access2ActionListMapper.get(access);
-
-                       if ((accessTypes == null) || (accessTypes.length == 0)) 
{
-                               accessGranted = false;
-                       } else {
-                               
-                               if (authorizer == null) {
-                                       
synchronized(XaSecureFSPermissionChecker.class) {
-                                               HDFSAccessVerifier temp = 
authorizer ;
-                                               if (temp == null) {
-                                                       try {
-                                                               authorizer = 
HDFSAccessVerifierFactory.getInstance();
-                                                       }
-                                                       catch(Throwable t) {
-                                                               
LOG.error("Unable to create Authorizer", t);
-                                                       }
-                                               }
-                                       }
-                               }
-                               
-                               if (authorizer != null) {
-                                       for (String accessType : accessTypes) {
-                                               accessGranted = 
authorizer.isAccessGranted(aPathName, aPathOwnerName, accessType, user, groups);
-                                               if (!accessGranted) {
-                                                       break;
-                                               }
-                                       }
-                               }
-                       }
-
-               } finally {
-                       logEvent(XaSecureModuleName, user, aPathName, access, 
accessGranted);
-               }
-               return accessGranted;
-       }
-       
-       
-       public static void logHadoopEvent(UserGroupInformation ugi, INode 
inode, FsAction access, boolean accessGranted) {
-               String path = (inode == null) ? 
XaSecureHadoopConstants.AUDITLOG_EMPTY_STRING : inode.getFullPathName() ;
-               String username = (ugi == null) ? 
XaSecureHadoopConstants.AUDITLOG_EMPTY_STRING : ugi.getShortUserName() ;
-               logEvent(HadoopModuleName, username, path,  access, 
accessGranted);
-       }
-       
-       
-
-       
-       
-       private static void logEvent(String moduleName,  String username, 
String path, FsAction access, boolean accessGranted) {
-               LogEventInfo e = null;
-
-               if(isAuditEnabled) {
-                   e = new LogEventInfo(moduleName,  username, path, access, 
accessGranted) ;
-               }
-
-               currentValidatedLogEvent.set(e);
-       }
-       
-       
-       public static void checkPermissionPre(String pathToBeValidated) {
-               // TODO: save the path in a thread-local
-       }
-       
-       public static void checkPermissionPost(String pathToBeValidated) {
-               writeLog(pathToBeValidated);
-       }
-
-       public static void writeLog(String pathValidated) {
-               
-               LogEventInfo e = currentValidatedLogEvent.get();
-               
-               if (e == null) {
-                       return ;
-               }
-               
-               String username = e.getUserName() ;
-               
-               boolean skipLog = (username != null && excludeUsers != null && 
excludeUsers.contains(username)) ;
-               
-               if (skipLog) {
-                       return ;
-               }
-
-               String requestedPath = e.getPath() ;
-               
-               if (requestedPath == null) {
-                       requestedPath = 
XaSecureHadoopConstants.AUDITLOG_EMPTY_STRING ;
-               }
-
-               if (! authorizer.isAuditLogEnabled(requestedPath)) {
-                       return ;
-               }
-               
-               
-               String accessType = ( (e.getAccess() == null) ? 
XaSecureHadoopConstants.AUDITLOG_EMPTY_STRING : e.getAccess().toString() ) ;
-               
-               HdfsAuditEvent auditEvent = new HdfsAuditEvent();
-
-               auditEvent.setUser(username);
-               auditEvent.setResourcePath(requestedPath);
-               auditEvent.setResourceType("HDFSPath") ;
-               auditEvent.setAccessType(accessType);
-               auditEvent.setAccessResult((short)(e.isAccessGranted() ? 1 : 
0));
-               auditEvent.setClientIP(getRemoteIp());
-               auditEvent.setEventTime(getUTCDate());
-               auditEvent.setAclEnforcer(e.getModuleName());
-               auditEvent.setRepositoryType(EnumRepositoryType.HDFS);
-               auditEvent.setRepositoryName(repositoryName);
-               auditEvent.setResultReason(pathValidated);
-
-               /*
-                * Review following audit fields for appropriate values
-                *
-               auditEvent.setAgentId();
-               auditEvent.setPolicyId();
-               auditEvent.setSessionId();
-               auditEvent.setClientType();
-                *
-                */
-
-               try {
-                       if (LOG.isDebugEnabled()) {
-                               LOG.debug("Audit log of auditEvent: [" + 
auditEvent.toString() + "] - START.");
-                       }
-                       AuditProviderFactory.getAuditProvider().log(auditEvent);
-                       if (LOG.isDebugEnabled()) {
-                               LOG.debug("Audit log of auditEvent: [" + 
auditEvent.toString() + "] - END.");
-                       }
-               }
-               catch(Throwable t) {
-                       LOG.error("ERROR during audit log of auditEvent: [" + 
auditEvent.toString() + "]", t);
-               }
-       }
-       
-       
-       private static String getRemoteIp() {
-               String ret = null ;
-               InetAddress ip = Server.getRemoteIp() ;
-               if (ip != null) {
-                       ret = ip.getHostAddress();
-               }
-               return ret ;
-       }
-       
-       
-       public static Date getUTCDate() {
-               Calendar local=Calendar.getInstance();
-           int offset = local.getTimeZone().getOffset(local.getTimeInMillis());
-           GregorianCalendar utc = new 
GregorianCalendar(TimeZone.getTimeZone("GMT+0"));
-           utc.setTimeInMillis(local.getTimeInMillis());
-           utc.add(Calendar.MILLISECOND, -offset);
-           return utc.getTime();
-       }
-
-}
-
-class LogEventInfo {
-       String moduleName ;
-       String userName ;
-       String path ;
-       FsAction access ;
-       boolean accessGranted ;
-       
-       LogEventInfo(String moduleName,  String username, String path, FsAction 
access, boolean accessGranted) {
-               this.moduleName = moduleName ;
-               this.userName = username ;
-               this.path = path ;
-               this.access = access ;
-               this.accessGranted = accessGranted;
-       }
-
-       public String getModuleName() {
-               return moduleName;
-       }
-
-       public String getUserName() {
-               return userName;
-       }
-
-       public String getPath() {
-               return path;
-       }
-
-       public FsAction getAccess() {
-               return access;
-       }
-
-       public boolean isAccessGranted() {
-               return accessGranted;
-       }
-       
-       
-       
-}

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifier.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifier.java
 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifier.java
new file mode 100644
index 0000000..1a0cb0b
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifier.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.authorization.hadoop;
+
+import java.util.Set;
+
+public interface HDFSAccessVerifier {
+       public class AccessContext {
+               String agentId;
+               int repositoryType;
+               String sessionId;
+               String clientType;
+               String clientIP;
+               String requestData;
+       }
+       
+       public boolean isAccessGranted(String aPathName, String aPathOwnerName, 
String access, String username, Set<String> groups);
+       public boolean isAuditLogEnabled(String aPathName) ;
+}

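HDFSAccessVerifier is the pluggable policy hook: one method decides access, the other toggles auditing per path. A minimal allow-all implementation, hypothetical and intended only for smoke-testing the agent wiring, could look like:

    package org.apache.ranger.authorization.hadoop;

    import java.util.Set;

    // Hypothetical verifier: grants every request and audits every path.
    // A real implementation evaluates Ranger policies instead of constants.
    public class AllowAllAccessVerifier implements HDFSAccessVerifier {
        public boolean isAccessGranted(String aPathName, String aPathOwnerName,
                String access, String username, Set<String> groups) {
            return true;
        }

        public boolean isAuditLogEnabled(String aPathName) {
            return true;
        }
    }
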
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifierFactory.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifierFactory.java
 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifierFactory.java
new file mode 100644
index 0000000..c4d262a
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/HDFSAccessVerifierFactory.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.authorization.hadoop;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
+import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
+
+public class HDFSAccessVerifierFactory {
+       
+       private static final Log LOG = 
LogFactory.getLog(HDFSAccessVerifierFactory.class) ;
+
+       private static volatile HDFSAccessVerifier hdfsAccessVerifier = null ; // volatile: required for double-checked locking in getInstance()
+       
+       public static HDFSAccessVerifier getInstance() {
+               if (hdfsAccessVerifier == null) {
+                       synchronized(HDFSAccessVerifierFactory.class) {
+                               HDFSAccessVerifier temp = hdfsAccessVerifier ;
+                               if (temp == null) {
+                                       
+                                       String hdfsAccessVerifierClassName = 
RangerConfiguration.getInstance().get(RangerHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_PROP,
 RangerHadoopConstants.HDFS_ACCESS_VERIFIER_CLASS_NAME_DEFAULT_VALUE) ;
+                                       if (hdfsAccessVerifierClassName != 
null) {
+                                               try {
+                                                       
hdfsAccessVerifierClassName = hdfsAccessVerifierClassName.trim();
+                                                       hdfsAccessVerifier = 
(HDFSAccessVerifier) (Class.forName(hdfsAccessVerifierClassName).newInstance()) 
;
+                                                       LOG.info("Created a new 
instance of class: [" + hdfsAccessVerifierClassName + "] for HDFS Access 
verification.");
+                                               } catch (InstantiationException e) {
+                                                       LOG.error("Unable to create HDFSAccessVerifier instance: [" + hdfsAccessVerifierClassName + "]", e);
+                                               } catch (IllegalAccessException e) {
+                                                       LOG.error("Unable to create HDFSAccessVerifier instance: [" + hdfsAccessVerifierClassName + "]", e);
+                                               } catch (ClassNotFoundException e) {
+                                                       LOG.error("Unable to create HDFSAccessVerifier instance: [" + hdfsAccessVerifierClassName + "]", e);
+                                               }
+                                       }
+                               }
+                       }
+               }
+               return hdfsAccessVerifier ;
+               
+       }
+}

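The factory instantiates whichever class name the HDFS_ACCESS_VERIFIER_CLASS_NAME_PROP configuration property carries, and returns null when instantiation fails (errors are only logged). A small fail-fast sketch of that contract; this driver is hypothetical, and the verifier class resolved is whatever the deployment configures:

    // Hypothetical bootstrap check: resolve the configured verifier once and
    // exit if none could be created, instead of failing later inside writeLog().
    public class VerifierBootstrapCheck {
        public static void main(String[] args) {
            org.apache.ranger.authorization.hadoop.HDFSAccessVerifier v =
                    org.apache.ranger.authorization.hadoop.HDFSAccessVerifierFactory.getInstance();
            if (v == null) {
                System.err.println("No HDFSAccessVerifier could be created; check configuration.");
                System.exit(1);
            }
            System.out.println("Using verifier: " + v.getClass().getName());
        }
    }
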
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
new file mode 100644
index 0000000..b7fd50a
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/AuthCodeInjectionJavaAgent.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.authorization.hadoop.agent;
+
+import java.lang.instrument.Instrumentation;
+
+public class AuthCodeInjectionJavaAgent {
+       public static final String AUTHORIZATION_AGENT_PARAM = "authagent";
+
+       public static void premain(String agentArgs, Instrumentation inst) {
+               if (agentArgs != null && 
AUTHORIZATION_AGENT_PARAM.equalsIgnoreCase(agentArgs.trim())) {
+                       inst.addTransformer(new HadoopAuthClassTransformer());
+               }
+       }
+
+}

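The premain() hook above only installs the transformer when the agent argument equals "authagent" (compared case-insensitively); any other value, or none, leaves the NameNode running unmodified. The agent is attached by starting the JVM with a flag of the form -javaagent:/path/to/agent.jar=authagent (the jar path here is illustrative), and the jar's manifest must name this class in its Premain-Class attribute, as the MANIFEST.MF change at the end of this commit shows.
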
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
new file mode 100644
index 0000000..a4c1d45
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.authorization.hadoop.agent;
+
+import java.io.IOException;
+import java.lang.instrument.ClassFileTransformer;
+import java.lang.instrument.IllegalClassFormatException;
+import java.security.ProtectionDomain;
+
+import javassist.CannotCompileException;
+import javassist.ClassPool;
+import javassist.CtClass;
+import javassist.CtMethod;
+import javassist.NotFoundException;
+
+public class HadoopAuthClassTransformer implements ClassFileTransformer {
+
+       volatile byte[] transformedClassByteCode = null ; // volatile: written once, then reused across transform() calls
+       
+       @Override
+       public byte[] transform(ClassLoader aClassLoader, String aClassName, 
Class<?> aClassBeingRedefined, ProtectionDomain aProtectionDomain, byte[] 
aClassFileBuffer) throws IllegalClassFormatException {
+
+               byte[] byteCode = aClassFileBuffer;
+               if 
(aClassName.equals("org/apache/hadoop/hdfs/server/namenode/FSPermissionChecker"))
 {
+                       System.out.println("Injection code is invoked in JVM [" + Runtime.getRuntime() + "] for class [" + aClassBeingRedefined + "] ....");
+                       try {
+                               if (transformedClassByteCode == null) {
+                                       ClassPool cp = ClassPool.getDefault();
+                                       String curClassName = 
aClassName.replaceAll("/", ".");
+                                       CtClass curClass = cp.get(curClassName);
+                                       
+                                       
+                                       CtClass inodeClass = null, 
snapShotClass = null, fsActionClass = null  ;
+                                       String paramClassName = null ;
+                                       
+                                       try {
+                                               paramClassName = 
"org.apache.hadoop.hdfs.server.namenode.INode" ;
+                                               inodeClass = 
cp.get(paramClassName) ;
+                                       } catch (javassist.NotFoundException 
nfe) {
+                                               System.err.println("Unable to 
find Class for [" + paramClassName + "]" + nfe) ;
+                                               inodeClass = null ;
+                                       }
+
+
+                                       try {
+                                               paramClassName = 
"org.apache.hadoop.fs.permission.FsAction" ;
+                                               fsActionClass = 
cp.get(paramClassName) ;
+                                       } catch (javassist.NotFoundException 
nfe) {
+                                               System.err.println("Unable to 
find Class for [" + paramClassName + "]" + nfe) ;
+                                               fsActionClass = null ;
+                                       }
+                                       
+                                       try {
+                                               paramClassName = 
"org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot" ;
+                                               snapShotClass = 
cp.get(paramClassName) ;
+                                       } catch (javassist.NotFoundException 
nfe) {
+                                               System.err.println("Unable to 
find Class for [" + paramClassName + "]" + nfe) ;
+                                               snapShotClass = null ;
+                                       }
+                                       
+                                       boolean injected = false ;
+                                       boolean injected_cm = false ;
+                                       boolean withIntParamInMiddle = false ;
+
+                                       
+                                       try {
+                                               
+                                               CtClass[] paramArgs = null ;
+                                               
+                                               if (inodeClass != null && 
fsActionClass != null) {
+
+                                                       CtMethod checkMethod = 
null ;
+                                                       
+                                                       if (snapShotClass != 
null) {
+                                                               paramArgs = new 
CtClass[] { inodeClass, snapShotClass, fsActionClass } ;
+                                                               try {
+                                                                       
checkMethod = curClass.getDeclaredMethod("check", paramArgs);
+                                                               }
+                                                               
catch(NotFoundException SSnfe) {
+                                                                       
System.out.println("Unable to find check method with snapshot class. Trying to 
find check method without snapshot support.") ;
+                                                                       
snapShotClass = null;
+                                                                       
paramArgs = new CtClass[] { inodeClass, CtClass.intType,  fsActionClass } ;
+                                                                       
checkMethod = curClass.getDeclaredMethod("check", paramArgs);
+                                                                       
withIntParamInMiddle = true ;
+                                                                       
System.out.println("Found method check() - without snapshot support") ;
+                                                               }
+                                                       }
+                                                       else {
+                                                               
System.out.println("Snapshot class was already null ... Trying to find check 
method") ;
+                                                               paramArgs = new 
CtClass[] { inodeClass, fsActionClass } ;
+                                                               checkMethod = 
curClass.getDeclaredMethod("check", paramArgs);
+                                                               
System.out.println("Found method check() - without snapshot support") ;
+                                                       }
+                                               
+                                                       if (checkMethod != 
null) {
+                                                               if 
(snapShotClass == null && (!withIntParamInMiddle)) {
+                                                                       
checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$2,true)
 ;");
+                                                                       CtClass 
throwable = ClassPool.getDefault().get("java.lang.Throwable");
+                                                                       
checkMethod.addCatch("{ 
org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$2,false)
 ; throw $e; }", throwable);
+                                                                       
checkMethod.insertBefore("{ if ( 
org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.check(ugi,$1,$2)
 ) { return ; } }");
+                                                               }
+                                                               else {
+                                                                       
checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$3,true)
 ;");
+                                                                       CtClass 
throwable = ClassPool.getDefault().get("java.lang.Throwable");
+                                                                       
checkMethod.addCatch("{ 
org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$3,false)
 ; throw $e; }", throwable);    
+                                                                       
checkMethod.insertBefore("{ if ( 
org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.check(ugi,$1,$3)
 ) { return ; } }");
+                                                               }
+                                                       System.out.println("Injection of code is successful ....");
+                                                       }
+                                                       else {
+                                                               System.out.println("Injection failed. Unable to identify check() method on class: [" + curClass.getName() + "]. Continuing without injection ...") ;
+                                                       }
+                                                       
+                                                       injected = true ;
+                                               }
+                                       } catch (NotFoundException nfex) {
+                                               nfex.printStackTrace();
+                                               System.out.println("Unable to 
find the check() method with expected params in [" + aClassName + "] ....");
+                                               for (CtMethod m : 
curClass.getDeclaredMethods()) {
+                                                       
System.err.println("Found Method: " + m);
+                                               }
+                                       }
+                                       
+                                       
+                                       try {
+                                               
+                                               CtMethod checkMethod = 
curClass.getDeclaredMethod("checkPermission");
+                                               
+                                               if (checkMethod != null) {
+                                                       
checkMethod.insertBefore("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.checkPermissionPre($1)
 ;");
+                                                       
checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.writeLog($1)
 ;");
+                                                       CtClass throwable = 
ClassPool.getDefault().get("org.apache.hadoop.security.AccessControlException");
+                                                       checkMethod.addCatch("{ 
org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.writeLog($1); 
throw $e; }", throwable);        
+                                                       injected_cm = true ;
+                                               }
+
+                                       } catch (NotFoundException nfe) {
+                                               nfe.printStackTrace();
+                                               System.out.println("Unable to 
find the checkPermission() method with expected params in [" + aClassName + "] 
....");
+                                               for (CtMethod m : 
curClass.getDeclaredMethods()) {
+                                                       
System.err.println("Found Method: " + m);
+                                               }
+                                       }
+                                       
+                                       System.out.println("Injected: " + 
injected + ", Injected_CheckMethod: " + injected_cm ) ;
+                                       
+                                       if (injected) {
+                                               byteCode = 
curClass.toBytecode();
+                                               if (transformedClassByteCode == 
null) {
+                                                       
synchronized(HadoopAuthClassTransformer.class) {
+                                                               byte[] temp = 
transformedClassByteCode ;
+                                                               if (temp == 
null) {
+                                                                       
transformedClassByteCode = byteCode;
+                                                               }
+                                                       }
+                                               }
+                                       }
+                                       
+                               }
+                               else {
+                                       byteCode = transformedClassByteCode;
+                                       System.out.println("Injection of code (using existing bytecode) is successful ....");
+                               }
+                       } catch (NotFoundException e) {
+                               System.err.println("NotFoundException for class name: " + aClassName + " Exception: " + e);
+                               e.printStackTrace();
+                       } catch (CannotCompileException e) {
+                               System.err.println("CannotCompileException for class name: " + aClassName + " Exception: " + e);
+                               e.printStackTrace();
+                       } catch (IOException e) {
+                               System.err.println("IOException for class name: " + aClassName + " Exception: " + e);
+                               e.printStackTrace();
+                       }
+               
+               }
+               
+               return byteCode;
+       }
+
+}

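For readers unfamiliar with javassist, this is a compact stand-alone sketch of the insertBefore/insertAfter/addCatch pattern the transformer applies to FSPermissionChecker; the target class demo.Target and its method work(String) are hypothetical:

    import javassist.ClassPool;
    import javassist.CtClass;
    import javassist.CtMethod;

    public class JavassistSketch {
        public static void main(String[] args) throws Exception {
            ClassPool cp = ClassPool.getDefault();
            CtClass target = cp.get("demo.Target");           // hypothetical class
            CtMethod m = target.getDeclaredMethod("work");    // hypothetical work(String)

            m.insertBefore("System.out.println(\"enter: \" + $1);");  // $1 = first argument
            m.insertAfter("System.out.println(\"normal exit\");");    // runs on normal return
            CtClass throwable = cp.get("java.lang.Throwable");
            m.addCatch("{ System.out.println(\"threw\"); throw $e; }", throwable); // $e = caught exception, re-thrown

            byte[] patched = target.toBytecode(); // what transform() hands back to the JVM
            System.out.println("patched bytecode: " + patched.length + " bytes");
        }
    }
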
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/exceptions/RangerAccessControlException.java
----------------------------------------------------------------------
diff --git 
a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/exceptions/RangerAccessControlException.java
 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/exceptions/RangerAccessControlException.java
new file mode 100644
index 0000000..68ca895
--- /dev/null
+++ 
b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/exceptions/RangerAccessControlException.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.ranger.authorization.hadoop.exceptions;
+
+import org.apache.hadoop.security.AccessControlException;
+
+
+public class RangerAccessControlException extends AccessControlException {
+
+       private static final long serialVersionUID = -4673975720243484927L;
+
+       public RangerAccessControlException(String aMsg) {
+               super(aMsg) ;
+       }
+
+}

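Because RangerAccessControlException subclasses Hadoop's AccessControlException, a denial thrown from the injected check() call propagates through FSPermissionChecker's existing throws clause without any signature change, and the addCatch hook installed by the transformer above, which catches AccessControlException, still fires to flush the audit record before re-throwing.
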
http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/413fcb68/hdfs-agent/src/main/resources/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/resources/META-INF/MANIFEST.MF 
b/hdfs-agent/src/main/resources/META-INF/MANIFEST.MF
index 0d9d2b4..a8ea649 100644
--- a/hdfs-agent/src/main/resources/META-INF/MANIFEST.MF
+++ b/hdfs-agent/src/main/resources/META-INF/MANIFEST.MF
@@ -1 +1 @@
-premain-class: 
com.xasecure.authorization.hadoop.agent.AuthCodeInjectionJavaAgent
+premain-class: 
org.apache.ranger.authorization.hadoop.agent.AuthCodeInjectionJavaAgent

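JAR manifest attribute names are case-insensitive per the JAR file specification, so the lower-case premain-class key shown here resolves to the standard Premain-Class attribute, which is how the JVM locates AuthCodeInjectionJavaAgent.premain() when the -javaagent flag is supplied.
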