http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/agents-audit/src/main/java/org/apache/ranger/audit/provider/BufferedAuditProvider.java
----------------------------------------------------------------------
diff --git a/agents-audit/src/main/java/org/apache/ranger/audit/provider/BufferedAuditProvider.java b/agents-audit/src/main/java/org/apache/ranger/audit/provider/BufferedAuditProvider.java
index cb2b385..be32519 100644
--- a/agents-audit/src/main/java/org/apache/ranger/audit/provider/BufferedAuditProvider.java
+++ b/agents-audit/src/main/java/org/apache/ranger/audit/provider/BufferedAuditProvider.java
@@ -20,6 +20,7 @@ package org.apache.ranger.audit.provider;
 import java.util.Properties;
 
 import org.apache.ranger.audit.model.AuditEventBase;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 
 public abstract class BufferedAuditProvider extends BaseAuditProvider {
        private LogBuffer<AuditEventBase>      mBuffer      = null;
@@ -32,16 +33,20 @@ public abstract class BufferedAuditProvider extends BaseAuditProvider {
 
        @Override
        public void log(AuditEventBase event) {
-               if(event.getAgentHostname() == null) {
-                       event.setAgentHostname(MiscUtil.getHostname());
-               }
+               if(event instanceof AuthzAuditEvent) {
+                       AuthzAuditEvent authzEvent = (AuthzAuditEvent)event;
 
-               if(event.getLogType() == null) {
-                       event.setLogType("RangerAudit");
-               }
+                       if(authzEvent.getAgentHostname() == null) {
+                               authzEvent.setAgentHostname(MiscUtil.getHostname());
+                       }
+
+                       if(authzEvent.getLogType() == null) {
+                               authzEvent.setLogType("RangerAudit");
+                       }
 
-               if(event.getEventId() == null) {
-                       event.setEventId(MiscUtil.generateUniqueId());
+                       if(authzEvent.getEventId() == null) {
+                               authzEvent.setEventId(MiscUtil.generateUniqueId());
+                       }
                }
 
                if(! mBuffer.add(event)) {

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
----------------------------------------------------------------------
diff --git a/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java b/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
index 0c2bca6..0f429ea 100644
--- a/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
+++ b/agents-audit/src/main/java/org/apache/ranger/audit/provider/MultiDestAuditProvider.java
@@ -24,11 +24,6 @@ import java.util.Properties;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.ranger.audit.model.AuditEventBase;
-import org.apache.ranger.audit.model.HBaseAuditEvent;
-import org.apache.ranger.audit.model.HdfsAuditEvent;
-import org.apache.ranger.audit.model.HiveAuditEvent;
-import org.apache.ranger.audit.model.KnoxAuditEvent;
-import org.apache.ranger.audit.model.StormAuditEvent;
 
 
 public class MultiDestAuditProvider extends BaseAuditProvider {

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/agents-audit/src/main/java/org/apache/ranger/audit/test/TestEvents.java
----------------------------------------------------------------------
diff --git a/agents-audit/src/main/java/org/apache/ranger/audit/test/TestEvents.java b/agents-audit/src/main/java/org/apache/ranger/audit/test/TestEvents.java
index 34b8e4b..bf00450 100644
--- a/agents-audit/src/main/java/org/apache/ranger/audit/test/TestEvents.java
+++ b/agents-audit/src/main/java/org/apache/ranger/audit/test/TestEvents.java
@@ -21,11 +21,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.log4j.xml.DOMConfigurator;
 import org.apache.ranger.audit.model.AuditEventBase;
-import org.apache.ranger.audit.model.HBaseAuditEvent;
-import org.apache.ranger.audit.model.HdfsAuditEvent;
-import org.apache.ranger.audit.model.HiveAuditEvent;
-import org.apache.ranger.audit.model.KnoxAuditEvent;
-import org.apache.ranger.audit.model.StormAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
+import org.apache.ranger.audit.model.EnumRepositoryType;
 import org.apache.ranger.audit.provider.AuditProvider;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.audit.provider.AuditProviderFactory.ApplicationType;
@@ -38,115 +35,142 @@ import java.util.Properties;
 
 public class TestEvents {
 
-       private static final Log LOG = LogFactory.getLog(TestEvents.class);
+    private static final Log LOG = LogFactory.getLog(TestEvents.class);
 
     public static void main(String[] args) {
-       DOMConfigurator.configure("log4j.xml");
+        DOMConfigurator.configure("log4j.xml");
 
         LOG.info("==> TestEvents.main()");
-        
+
         try {
-               Properties auditProperties = new Properties();
-               
-               String AUDIT_PROPERTIES_FILE = "xasecure-audit.properties";
-               
-               File propFile = new File(AUDIT_PROPERTIES_FILE);
-               
-               if(propFile.exists()) {
-               LOG.info("Loading Audit properties file" + AUDIT_PROPERTIES_FILE);
-
-               auditProperties.load(new FileInputStream(propFile));
-               } else {
-               LOG.info("Audit properties file missing: " + AUDIT_PROPERTIES_FILE);
-
-               
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.url", 
"jdbc:mysql://localhost:3306/xa_db");
-                       
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.user", 
"xaaudit");
-                       
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.password",
 "xaaudit");
-                       
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.driver", 
"com.mysql.jdbc.Driver");
-       
-                       
auditProperties.setProperty("xasecure.audit.is.enabled", "true");
-                       
auditProperties.setProperty("xasecure.audit.log4j.is.enabled", "false");
-                       
auditProperties.setProperty("xasecure.audit.log4j.is.async", "false");
-                       
auditProperties.setProperty("xasecure.audit.log4j.async.max.queue.size", 
"100000");
-                       
auditProperties.setProperty("xasecure.audit.log4j.async.max.flush.interval.ms", 
"30000");
-                       
auditProperties.setProperty("xasecure.audit.db.is.enabled", "true");
-                       
auditProperties.setProperty("xasecure.audit.db.is.async", "true");
-                       
auditProperties.setProperty("xasecure.audit.db.async.max.queue.size", "100000");
-                       
auditProperties.setProperty("xasecure.audit.db.async.max.flush.interval.ms", 
"30000");
-                       
auditProperties.setProperty("xasecure.audit.db.batch.size", "100");
-               }
-               
-               AuditProviderFactory.getInstance().init(auditProperties, 
ApplicationType.Hdfs);
-
-               AuditProvider provider = 
AuditProviderFactory.getAuditProvider();
-
-               LOG.info("provider=" + provider.toString());
-
-               String strEventCount          = args.length > 0 ? args[0] : 
auditProperties.getProperty("xasecure.audit.test.event.count");
-               String strEventPauseTimeInMs  = args.length > 1 ? args[1] : 
auditProperties.getProperty("xasecure.audit.test.event.pause.time.ms");
-               String strSleepTimeBeforeExit = args.length > 2 ? args[2] : 
auditProperties.getProperty("xasecure.audit.test.sleep.time.before.exit.seconds");
-
-               int eventCount          = (strEventCount == null) ? 1024 : 
Integer.parseInt(strEventCount);
-               int eventPauseTime      = (strEventPauseTimeInMs == null) ? 0 : 
Integer.parseInt(strEventPauseTimeInMs);
-               int sleepTimeBeforeExit = ((strSleepTimeBeforeExit == null) ? 0 
: Integer.parseInt(strSleepTimeBeforeExit)) * 1000;
-
-               for(int i = 0; i < eventCount; i++) {
-                       AuditEventBase event = getTestEvent(i);
-
-                   LOG.info("==> TestEvents.main(" + (i+1) + "): adding " + 
event.getClass().getName());
-                       provider.log(event);
-
-                       if(eventPauseTime > 0) {
-                               Thread.sleep(eventPauseTime);
-                       }
-               }
+            Properties auditProperties = new Properties();
+
+            String AUDIT_PROPERTIES_FILE = "xasecure-audit.properties";
+
+            File propFile = new File(AUDIT_PROPERTIES_FILE);
+
+            if(propFile.exists()) {
+                LOG.info("Loading Audit properties file" + AUDIT_PROPERTIES_FILE);
+
+                auditProperties.load(new FileInputStream(propFile));
+            } else {
+                LOG.info("Audit properties file missing: " + AUDIT_PROPERTIES_FILE);
+
+                
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.url", 
"jdbc:mysql://localhost:3306/xa_db");
+                
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.user", 
"xaaudit");
+                
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.password",
 "xaaudit");
+                
auditProperties.setProperty("xasecure.audit.jpa.javax.persistence.jdbc.driver", 
"com.mysql.jdbc.Driver");
+
+                auditProperties.setProperty("xasecure.audit.is.enabled", 
"true");
+                auditProperties.setProperty("xasecure.audit.log4j.is.enabled", 
"false");
+                auditProperties.setProperty("xasecure.audit.log4j.is.async", 
"false");
+                
auditProperties.setProperty("xasecure.audit.log4j.async.max.queue.size", 
"100000");
+                
auditProperties.setProperty("xasecure.audit.log4j.async.max.flush.interval.ms", 
"30000");
+                auditProperties.setProperty("xasecure.audit.db.is.enabled", 
"true");
+                auditProperties.setProperty("xasecure.audit.db.is.async", 
"true");
+                
auditProperties.setProperty("xasecure.audit.db.async.max.queue.size", "100000");
+                
auditProperties.setProperty("xasecure.audit.db.async.max.flush.interval.ms", 
"30000");
+                auditProperties.setProperty("xasecure.audit.db.batch.size", 
"100");
+            }
+
+            AuditProviderFactory.getInstance().init(auditProperties, 
ApplicationType.Hdfs);
+
+            AuditProvider provider = AuditProviderFactory.getAuditProvider();
+
+            LOG.info("provider=" + provider.toString());
+
+            String strEventCount          = args.length > 0 ? args[0] : 
auditProperties.getProperty("xasecure.audit.test.event.count");
+            String strEventPauseTimeInMs  = args.length > 1 ? args[1] : 
auditProperties.getProperty("xasecure.audit.test.event.pause.time.ms");
+            String strSleepTimeBeforeExit = args.length > 2 ? args[2] : 
auditProperties.getProperty("xasecure.audit.test.sleep.time.before.exit.seconds");
+
+            int eventCount          = (strEventCount == null) ? 1024 : 
Integer.parseInt(strEventCount);
+            int eventPauseTime      = (strEventPauseTimeInMs == null) ? 0 : 
Integer.parseInt(strEventPauseTimeInMs);
+            int sleepTimeBeforeExit = ((strSleepTimeBeforeExit == null) ? 0 : 
Integer.parseInt(strSleepTimeBeforeExit)) * 1000;
+
+            for(int i = 0; i < eventCount; i++) {
+                AuditEventBase event = getTestEvent(i);
+
+                LOG.info("==> TestEvents.main(" + (i+1) + "): adding " + 
event.getClass().getName());
+                provider.log(event);
+
+                if(eventPauseTime > 0) {
+                    Thread.sleep(eventPauseTime);
+                }
+            }
 
             provider.waitToComplete();
-            
+
             // incase of HdfsAuditProvider, logs are saved to local file system which gets sent to HDFS asynchronusly in a separate thread.
             // So, at this point it is possible that few local log files haven't made to HDFS.
             if(sleepTimeBeforeExit > 0) {
-               LOG.info("waiting for " + sleepTimeBeforeExit + "ms before exiting..");
+                LOG.info("waiting for " + sleepTimeBeforeExit + "ms before exiting..");
 
-               try {
-                       Thread.sleep(sleepTimeBeforeExit);
-               } catch(Exception excp) {
-                       LOG.info("error while waiting before exiting..");
-               }
+                try {
+                    Thread.sleep(sleepTimeBeforeExit);
+                } catch(Exception excp) {
+                    LOG.info("error while waiting before exiting..");
+                }
             }
 
             provider.stop();
         } catch(Exception excp) {
             LOG.info(excp.getLocalizedMessage());
-               excp.printStackTrace();
+            excp.printStackTrace();
         }
 
         LOG.info("<== TestEvents.main()");
     }
-    
+
     private static AuditEventBase getTestEvent(int idx) {
-       AuditEventBase event = null;
- 
-               switch(idx % 5) {
-                       case 0:
-                               event = new HdfsAuditEvent();
-                       break;
-                       case 1:
-                               event = new HBaseAuditEvent();
-                       break;
-                       case 2:
-                               event = new HiveAuditEvent();
-                       break;
-                       case 3:
-                               event = new KnoxAuditEvent();
-                       break;
-                       case 4:
-                               event = new StormAuditEvent();
-                       break;
-               }
-               event.setEventTime(new Date());
-               event.setResultReason(Integer.toString(idx));
-
-               return event;
+        AuthzAuditEvent event = new AuthzAuditEvent();
+
+        event.setClientIP("127.0.0.1");
+        event.setAccessResult((short)(idx % 2 > 0 ? 1 : 0));
+        event.setAclEnforcer("xasecure-acl");
+
+        switch(idx % 5) {
+            case 0:
+                event.setRepositoryName("hdfsdev");
+                event.setRepositoryType(EnumRepositoryType.HDFS);
+                event.setResourcePath("/tmp/test-audit.log");
+                event.setResourceType("file");
+                event.setAccessType("read");
+                if(idx % 2 > 0) {
+                    event.setAclEnforcer("hadoop-acl");
+                }
+            break;
+            case 1:
+                event.setRepositoryName("hbasedev");
+                event.setRepositoryType(EnumRepositoryType.HBASE);
+                event.setResourcePath("test_table/test_cf/test_col");
+                event.setResourceType("column");
+                event.setAccessType("read");
+            break;
+            case 2:
+                event.setRepositoryName("hivedev");
+                event.setRepositoryType(EnumRepositoryType.HIVE);
+                event.setResourcePath("test_database/test_table/test_col");
+                event.setResourceType("column");
+                event.setAccessType("select");
+            break;
+            case 3:
+                event.setRepositoryName("knoxdev");
+                event.setRepositoryType(EnumRepositoryType.KNOX);
+                event.setResourcePath("topologies/ranger-admin");
+                event.setResourceType("service");
+                event.setAccessType("get");
+            break;
+            case 4:
+                event.setRepositoryName("stormdev");
+                event.setRepositoryType(EnumRepositoryType.STORM);
+                event.setResourcePath("topologies/read-finance-stream");
+                event.setResourceType("topology");
+                event.setAccessType("submit");
+            break;
+        }
+        event.setEventTime(new Date());
+        event.setResultReason(Integer.toString(idx));
+
+        return event;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/agents-audit/src/main/resources/META-INF/persistence.xml
----------------------------------------------------------------------
diff --git a/agents-audit/src/main/resources/META-INF/persistence.xml b/agents-audit/src/main/resources/META-INF/persistence.xml
index 21b8f06..e130951 100644
--- a/agents-audit/src/main/resources/META-INF/persistence.xml
+++ b/agents-audit/src/main/resources/META-INF/persistence.xml
@@ -17,12 +17,7 @@
 -->
 <persistence version="2.0" xmlns="http://java.sun.com/xml/ns/persistence" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/persistence http://java.sun.com/xml/ns/persistence/persistence_2_0.xsd">
        <persistence-unit name="xa_server">
-               <class>org.apache.ranger.audit.entity.XXBaseAuditEvent</class>
-               <class>org.apache.ranger.audit.entity.XXHBaseAuditEvent</class>
-               <class>org.apache.ranger.audit.entity.XXHdfsAuditEvent</class>
-               <class>org.apache.ranger.audit.entity.XXHiveAuditEvent</class>
-               <class>org.apache.ranger.audit.entity.XXKnoxAuditEvent</class>
-               <class>org.apache.ranger.audit.entity.XXStormAuditEvent</class>
+               <class>org.apache.ranger.audit.entity.AuthzAuditEventDbObj</class>
 
                <properties>
                        <property name="eclipselink.logging.level" 
value="SEVERE"/>

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/agents-impl/src/main/java/org/apache/ranger/pdp/knox/filter/RangerPDPKnoxFilter.java
----------------------------------------------------------------------
diff --git a/agents-impl/src/main/java/org/apache/ranger/pdp/knox/filter/RangerPDPKnoxFilter.java b/agents-impl/src/main/java/org/apache/ranger/pdp/knox/filter/RangerPDPKnoxFilter.java
index 7eb498e..948bc6a 100644
--- a/agents-impl/src/main/java/org/apache/ranger/pdp/knox/filter/RangerPDPKnoxFilter.java
+++ b/agents-impl/src/main/java/org/apache/ranger/pdp/knox/filter/RangerPDPKnoxFilter.java
@@ -40,7 +40,7 @@ import org.apache.hadoop.gateway.security.GroupPrincipal;
 import org.apache.hadoop.gateway.security.ImpersonatedPrincipal;
 import org.apache.hadoop.gateway.security.PrimaryPrincipal;
 import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.KnoxAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.audit.provider.AuditProvider;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
@@ -183,7 +183,7 @@ public class RangerPDPKnoxFilter implements Filter {
                        String topology, String service,
                        String accessType, boolean accessGranted) {
 
-               KnoxAuditEvent auditEvent = new KnoxAuditEvent();
+               AuthzAuditEvent auditEvent = new AuthzAuditEvent();
 
                auditEvent.setUser(userName == null ? 
                                RangerHadoopConstants.AUDITLOG_EMPTY_STRING : 
userName);

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java
index 97d17e2..9401245 100644
--- a/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java
+++ b/hbase-agent/src/main/java/org/apache/ranger/authorization/hbase/RangerAuthorizationCoprocessor.java
@@ -93,7 +93,7 @@ import org.apache.ranger.admin.client.RangerAdminRESTClient;
 import org.apache.ranger.admin.client.datatype.GrantRevokeData;
 import org.apache.ranger.admin.client.datatype.GrantRevokeData.PermMap;
 import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.HBaseAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
@@ -982,7 +982,7 @@ public class RangerAuthorizationCoprocessor extends RangerAuthorizationCoprocess
                                resourceType = "column";
                        }
                        
-                       HBaseAuditEvent auditEvent = new HBaseAuditEvent();
+                       AuthzAuditEvent auditEvent = new AuthzAuditEvent();
 
                        auditEvent.setAclEnforcer(RangerModuleName);
                        auditEvent.setResourceType(resourceType);

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
index f558013..1c9017c 100644
--- a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
+++ b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
@@ -38,7 +38,7 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.HdfsAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.HDFSAccessVerifier;
 import org.apache.ranger.authorization.hadoop.HDFSAccessVerifierFactory;
@@ -217,7 +217,7 @@ public class RangerFSPermissionChecker {
                
                String accessType = ( (e.getAccess() == null) ? RangerHadoopConstants.AUDITLOG_EMPTY_STRING : e.getAccess().toString() ) ;
                
-               HdfsAuditEvent auditEvent = new HdfsAuditEvent();
+               AuthzAuditEvent auditEvent = new AuthzAuditEvent();
 
                auditEvent.setUser(username);
                auditEvent.setResourcePath(requestedPath);

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
----------------------------------------------------------------------
diff --git a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
index cc56f58..81a86f1 100644
--- a/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
+++ b/hive-agent/src/main/java/org/apache/ranger/authorization/hive/authorizer/RangerHiveAuthorizer.java
@@ -46,7 +46,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.ranger.admin.client.RangerAdminRESTClient;
 import org.apache.ranger.admin.client.datatype.GrantRevokeData;
 import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.HiveAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
@@ -716,7 +716,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
        }
 
     private void logAuditEventForDfs(UserGroupInformation ugi, String dfsCommand, boolean accessGranted) {
-               HiveAuditEvent auditEvent = new HiveAuditEvent();
+               AuthzAuditEvent auditEvent = new AuthzAuditEvent();
 
                try {
                        auditEvent.setAclEnforcer(RangerModuleName);
@@ -748,7 +748,7 @@ public class RangerHiveAuthorizer extends RangerHiveAuthorizerBase {
     }
 
        private void logAuditEvent(UserGroupInformation ugi, RangerHiveObjectAccessInfo objAccessInfo, boolean accessGranted) {
-               HiveAuditEvent auditEvent = new HiveAuditEvent();
+               AuthzAuditEvent auditEvent = new AuthzAuditEvent();
 
                try {
                        auditEvent.setAclEnforcer(RangerModuleName);

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/lookup-client/src/scripts/run-audit-test.sh
----------------------------------------------------------------------
diff --git a/lookup-client/src/scripts/run-audit-test.sh b/lookup-client/src/scripts/run-audit-test.sh
index 9dd00eb..a9d650d 100755
--- a/lookup-client/src/scripts/run-audit-test.sh
+++ b/lookup-client/src/scripts/run-audit-test.sh
@@ -16,11 +16,12 @@
 # limitations under the License.
 
 
-HADOOP_DIR=/usr/hdp/current/hadoop
+HADOOP_DIR=/usr/hdp/current/hadoop-client
+HADOOP_LIB_DIR=/usr/hdp/current/hadoop-client/lib
 HADOOP_CONF_DIR=/etc/hadoop/conf
 
-cp=
-for jar in $HADOOP_CONF_DIR $HADOOP_DIR/lib/* $HADOOP_DIR/client/*
+cp=./ranger-plugins-audit-0.4.0.jar
+for jar in $HADOOP_CONF_DIR $HADOOP_LIB_DIR/commons-logging-1.1.3.jar $HADOOP_LIB_DIR/log4j-1.2.17.jar $HADOOP_LIB_DIR/eclipselink-2.5.2-M1.jar $HADOOP_LIB_DIR/gson-2.2.4.jar $HADOOP_LIB_DIR/javax.persistence-2.1.0.jar $HADOOP_LIB_DIR/mysql-connector-java.jar $HADOOP_DIR/hadoop-common.jar
 do
   cp=${cp}:${jar}
 done

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/2b00241a/storm-agent/src/main/java/org/apache/ranger/authorization/storm/authorizer/RangerStormAuthorizer.java
----------------------------------------------------------------------
diff --git a/storm-agent/src/main/java/org/apache/ranger/authorization/storm/authorizer/RangerStormAuthorizer.java b/storm-agent/src/main/java/org/apache/ranger/authorization/storm/authorizer/RangerStormAuthorizer.java
index 89e4c4b..c49b613 100644
--- a/storm-agent/src/main/java/org/apache/ranger/authorization/storm/authorizer/RangerStormAuthorizer.java
+++ b/storm-agent/src/main/java/org/apache/ranger/authorization/storm/authorizer/RangerStormAuthorizer.java
@@ -24,7 +24,7 @@ import java.util.Map;
 
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.ranger.audit.model.EnumRepositoryType;
-import org.apache.ranger.audit.model.StormAuditEvent;
+import org.apache.ranger.audit.model.AuthzAuditEvent;
 import org.apache.ranger.audit.provider.AuditProviderFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
@@ -126,7 +126,7 @@ public class RangerStormAuthorizer implements IAuthorizer {
                        
                        if (isAuditEnabled) {
                                
-                               StormAuditEvent auditEvent = new StormAuditEvent() ;
+                               AuthzAuditEvent auditEvent = new AuthzAuditEvent() ;
        
                                String sessionId = null ;
                                String clientIp = null ;

Reply via email to