Repository: incubator-ranger
Updated Branches:
  refs/heads/stack 2ca971756 -> 1e8dc41a8
RANGER-203: HDFS plugin update to use pluggable-service model and common policy engine (initial version).

Project: http://git-wip-us.apache.org/repos/asf/incubator-ranger/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-ranger/commit/6a803eaa
Tree: http://git-wip-us.apache.org/repos/asf/incubator-ranger/tree/6a803eaa
Diff: http://git-wip-us.apache.org/repos/asf/incubator-ranger/diff/6a803eaa

Branch: refs/heads/stack
Commit: 6a803eaa1a569753974e2c550a58fa1c2cf7e443
Parents: 2ca9717
Author: Madhan Neethiraj <[email protected]>
Authored: Thu Jan 22 18:31:53 2015 -0800
Committer: Madhan Neethiraj <[email protected]>
Committed: Thu Jan 22 18:31:53 2015 -0800

----------------------------------------------------------------------
 hdfs-agent/pom.xml                              |   5 +
 .../namenode/RangerFSPermissionChecker.java     | 422 ++++++++++---------
 .../agent/HadoopAuthClassTransformer.java       |  14 +-
 .../ranger/plugin/service/RangerBasePlugin.java |  23 +-
 .../ranger/plugin/store/file/BaseFileStore.java |   2 +-
 .../plugin/store/file/ServiceFileStore.java     |   2 +-
 .../ranger/plugin/util/PolicyRefresher.java     |  16 +-
 .../service-defs/ranger-servicedef-hdfs.json    |   1 +
 8 files changed, 265 insertions(+), 220 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/hdfs-agent/pom.xml
----------------------------------------------------------------------
diff --git a/hdfs-agent/pom.xml b/hdfs-agent/pom.xml
index db0fbee..5867ac8 100644
--- a/hdfs-agent/pom.xml
+++ b/hdfs-agent/pom.xml
@@ -75,6 +75,11 @@
 			<groupId>org.mockito</groupId>
 			<artifactId>mockito-core</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>org.apache.ranger</groupId>
+			<artifactId>plugin-common</artifactId>
+			<version>${project.version}</version>
+		</dependency>
 	</dependencies>
 	<build>
 	<!--
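
[The RangerFSPermissionChecker diff below replaces the HDFSAccessVerifier-based
authorizer with the common policy engine from plugin-common. A minimal sketch of
the new check flow, using only the classes added in this commit; illustrative,
since in production the NameNode reaches this code through injected bytecode,
and the sample path/user/group values here are hypothetical:

    // same package as RangerFSPermissionChecker (the classes are package-private)
    RangerHdfsPlugin plugin = new RangerHdfsPlugin();  // serviceType "hdfs"
    plugin.init();                                     // wires a RangerPolicyEngineImpl and the policy refresher

    RangerHdfsAccessRequest request = new RangerHdfsAccessRequest(
        "/tmp/example", "hdfs", FsAction.READ,         // path, path owner, requested access
        "alice", Collections.singleton("analysts"));   // user, groups

    RangerAccessResult result = plugin.getPolicyEngine().isAccessAllowed(
        request, new RangerHdfsAuditHandler("/tmp/example"));

    boolean allowed = result.getResult() == RangerAccessResult.Result.ALLOWED;
]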

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
index 1c9017c..ff60d52 100644
--- a/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
+++ b/hdfs-agent/src/main/java/org/apache/hadoop/hdfs/server/namenode/RangerFSPermissionChecker.java
@@ -32,229 +32,202 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TimeZone;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.ranger.audit.model.EnumRepositoryType;
 import org.apache.ranger.audit.model.AuthzAuditEvent;
-import org.apache.ranger.audit.provider.AuditProviderFactory;
-import org.apache.ranger.authorization.hadoop.HDFSAccessVerifier;
-import org.apache.ranger.authorization.hadoop.HDFSAccessVerifierFactory;
 import org.apache.ranger.authorization.hadoop.config.RangerConfiguration;
 import org.apache.ranger.authorization.hadoop.constants.RangerHadoopConstants;
 import org.apache.ranger.authorization.hadoop.exceptions.RangerAccessControlException;
+import org.apache.ranger.plugin.audit.RangerDefaultAuditHandler;
+import org.apache.ranger.plugin.model.RangerServiceDef;
+import org.apache.ranger.plugin.policyengine.RangerAccessRequest;
+import org.apache.ranger.plugin.policyengine.RangerAccessRequestImpl;
+import org.apache.ranger.plugin.policyengine.RangerAccessResult;
+import org.apache.ranger.plugin.policyengine.RangerPolicyEngine;
+import org.apache.ranger.plugin.policyengine.RangerPolicyEngineImpl;
+import org.apache.ranger.plugin.policyengine.RangerResource;
+import org.apache.ranger.plugin.service.RangerBasePlugin;
 
 public class RangerFSPermissionChecker {
-
-	private static Map<FsAction, String[]> access2ActionListMapper = null ;
-
-	private static HDFSAccessVerifier authorizer = null ;
-
-	private static final String RangerModuleName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ;
-	private static final String HadoopModuleName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HADOOP_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_HADOOP_MODULE_ACL_NAME) ;
-	private static final boolean addHadoopAuth = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_PROP, RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_DEFAULT) ;
-	private static final String excludeUserList = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HDFS_EXCLUDE_LIST_PROP, RangerHadoopConstants.AUDITLOG_EMPTY_STRING) ;
-	private static final String repositoryName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_REPOSITORY_NAME_PROP);
-	private static final boolean isAuditEnabled = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.AUDITLOG_IS_ENABLED_PROP, true);
-
 	private static final Log LOG = LogFactory.getLog(RangerFSPermissionChecker.class);
 
-	private static HashSet<String> excludeUsers = null ;
-
-	private static ThreadLocal<LogEventInfo> currentValidatedLogEvent = new ThreadLocal<LogEventInfo>() ;
-
+	private static final boolean addHadoopAuth = RangerConfiguration.getInstance().getBoolean(RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_PROP, RangerHadoopConstants.RANGER_ADD_HDFS_PERMISSION_DEFAULT) ;
 
-	static {
-		access2ActionListMapper = new HashMap<FsAction, String[]>();
-		access2ActionListMapper.put(FsAction.NONE, new String[] {});
-		access2ActionListMapper.put(FsAction.ALL, new String[] { READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.READ, new String[] { READ_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.READ_WRITE, new String[] { READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.READ_EXECUTE, new String[] { READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.WRITE, new String[] { WRITE_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.WRITE_EXECUTE, new String[] { WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE });
-		access2ActionListMapper.put(FsAction.EXECUTE, new String[] { EXECUTE_ACCCESS_TYPE });
-
-		if (excludeUserList != null && excludeUserList.trim().length() > 0) {
-			excludeUsers = new HashSet<String>() ;
-			for(String excludeUser : excludeUserList.trim().split(",")) {
-				excludeUser = excludeUser.trim() ;
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("Adding exclude user [" + excludeUser + "]");
-				}
-				excludeUsers.add(excludeUser) ;
-			}
-		}
-
-		RangerConfiguration.getInstance().initAudit(AuditProviderFactory.ApplicationType.Hdfs);
-	}
+	private static RangerHdfsPlugin rangerPlugin = null;
+	private static ThreadLocal<RangerHdfsAuditHandler> currentAuditHandler = new ThreadLocal<RangerHdfsAuditHandler>();
 
-	public static boolean check(UserGroupInformation ugi, INode inode, FsAction access) throws RangerAccessControlException {
-		if (inode == null) {
+	public static boolean check(UserGroupInformation ugi, INode inode, FsAction access) throws RangerAccessControlException {
+		if (ugi == null || inode == null || access == null) {
 			return false;
 		}
 
-		String user = ugi.getShortUserName();
+		String path = inode.getFullPathName();
+		String pathOwner = inode.getUserName();
+		String user = ugi.getShortUserName();
+		Set<String> groups = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(ugi.getGroupNames())));
 
-		Set<String> groups = Collections.unmodifiableSet(new HashSet<String>(Arrays.asList(ugi.getGroupNames())));
-
-		String pathOwnerName = inode.getUserName() ;
+		boolean accessGranted = AuthorizeAccessForUser(path, pathOwner, access, user, groups);
 
-		boolean accessGranted = AuthorizeAccessForUser(inode.getFullPathName(), pathOwnerName, access, user, groups);
-
 		if (!accessGranted && !addHadoopAuth ) {
-			String inodeInfo = (inode.isDirectory() ? "directory" : "file") + "=" + "\"" + inode.getFullPathName() + "\"" ;
+			String inodeInfo = (inode.isDirectory() ? "directory" : "file") + "=" + "\"" + path + "\"" ;
 			throw new RangerAccessControlException("Permission denied: principal{user=" + user + ",groups: " + groups + "}, access=" + access + ", " + inodeInfo ) ;
 		}
-
-		return accessGranted ;
+
+		return accessGranted ;
 	}
 
 	public static boolean AuthorizeAccessForUser(String aPathName, String aPathOwnerName, FsAction access, String user, Set<String> groups) throws RangerAccessControlException {
 		boolean accessGranted = false;
-		try {
+
+		if(aPathName != null && aPathOwnerName != null && access != null && user != null && groups != null) {
 			if (RangerHadoopConstants.HDFS_ROOT_FOLDER_PATH_ALT.equals(aPathName)) {
 				aPathName = RangerHadoopConstants.HDFS_ROOT_FOLDER_PATH;
 			}
-
-			String[] accessTypes = access2ActionListMapper.get(access);
-
-			if ((accessTypes == null) || (accessTypes.length == 0)) {
-				accessGranted = false;
-			} else {
-
-				if (authorizer == null) {
-					synchronized(RangerFSPermissionChecker.class) {
-						HDFSAccessVerifier temp = authorizer ;
-						if (temp == null) {
-							try {
-								authorizer = HDFSAccessVerifierFactory.getInstance();
-							}
-							catch(Throwable t) {
-								LOG.error("Unable to create Authorizer", t);
-							}
+
+			if (rangerPlugin == null) {
+				synchronized(RangerFSPermissionChecker.class) {
+					RangerHdfsPlugin temp = rangerPlugin ;
+					if (temp == null) {
+						try {
+							temp = new RangerHdfsPlugin();
+							temp.init();
+
+							rangerPlugin = temp;
 						}
-					}
-				}
-
-				if (authorizer != null) {
-					for (String accessType : accessTypes) {
-						accessGranted = authorizer.isAccessGranted(aPathName, aPathOwnerName, accessType, user, groups);
-						if (!accessGranted) {
-							break;
+						catch(Throwable t) {
+							LOG.error("Unable to create Authorizer", t);
 						}
 					}
 				}
 			}
-		} finally {
-			logEvent(RangerModuleName, user, aPathName, access, accessGranted);
-		}
-		return accessGranted;
-	}
-
-
-	public static void logHadoopEvent(UserGroupInformation ugi, INode inode, FsAction access, boolean accessGranted) {
-		String path = (inode == null) ? RangerHadoopConstants.AUDITLOG_EMPTY_STRING : inode.getFullPathName() ;
-		String username = (ugi == null) ? RangerHadoopConstants.AUDITLOG_EMPTY_STRING : ugi.getShortUserName() ;
-		logEvent(HadoopModuleName, username, path, access, accessGranted);
-	}
-
+			if (rangerPlugin != null && rangerPlugin.getPolicyEngine() != null) {
+				RangerHdfsAccessRequest request = new RangerHdfsAccessRequest(aPathName, aPathOwnerName, access, user, groups);
 
-
-
-	private static void logEvent(String moduleName, String username, String path, FsAction access, boolean accessGranted) {
-		LogEventInfo e = null;
+				RangerAccessResult result = rangerPlugin.getPolicyEngine().isAccessAllowed(request, getCurrentAuditHandler());
 
-		if(isAuditEnabled) {
-			e = new LogEventInfo(moduleName, username, path, access, accessGranted) ;
+				accessGranted = result.getResult() == RangerAccessResult.Result.ALLOWED;
+			}
 		}
 
-		currentValidatedLogEvent.set(e);
+		return accessGranted;
 	}
-
-
+
 	public static void checkPermissionPre(String pathToBeValidated) {
-		// TODO: save the path in a thread-local
+		RangerHdfsAuditHandler auditHandler = new RangerHdfsAuditHandler(pathToBeValidated);
+
+		currentAuditHandler.set(auditHandler);
 	}
-
+
 	public static void checkPermissionPost(String pathToBeValidated) {
-		writeLog(pathToBeValidated);
-	}
+		RangerHdfsAuditHandler auditHandler = getCurrentAuditHandler();
 
-	public static void writeLog(String pathValidated) {
-
-		LogEventInfo e = currentValidatedLogEvent.get();
-
-		if (e == null) {
-			return ;
-		}
-
-		String username = e.getUserName() ;
-
-		boolean skipLog = (username != null && excludeUsers != null && excludeUsers.contains(username)) ;
-
-		if (skipLog) {
-			return ;
+		if(auditHandler != null) {
+			auditHandler.flushAudit();
 		}
 
-		String requestedPath = e.getPath() ;
-
-		if (requestedPath == null) {
-			requestedPath = RangerHadoopConstants.AUDITLOG_EMPTY_STRING ;
+		currentAuditHandler.set(null);
+	}
+
+	public static void logHadoopEvent(INode inode, boolean accessGranted) {
+		if(inode == null) {
+			return;
 		}
 
-		if (! authorizer.isAuditLogEnabled(requestedPath)) {
-			return ;
+		RangerHdfsAuditHandler auditHandler = getCurrentAuditHandler();
+
+		if(auditHandler != null) {
+			auditHandler.logHadoopEvent(inode.getFullPathName(), accessGranted);
 		}
+	}
+
+	private static RangerHdfsAuditHandler getCurrentAuditHandler() {
+		return currentAuditHandler.get();
+	}
+}
+
+class RangerHdfsPlugin extends RangerBasePlugin {
+	public RangerHdfsPlugin() {
+		super("hdfs");
+	}
+
+	public void init() {
+		RangerPolicyEngine policyEngine = new RangerPolicyEngineImpl();
 
-
-		String accessType = ( (e.getAccess() == null) ? RangerHadoopConstants.AUDITLOG_EMPTY_STRING : e.getAccess().toString() ) ;
-
-		AuthzAuditEvent auditEvent = new AuthzAuditEvent();
-
-		auditEvent.setUser(username);
-		auditEvent.setResourcePath(requestedPath);
-		auditEvent.setResourceType("HDFSPath") ;
-		auditEvent.setAccessType(accessType);
-		auditEvent.setAccessResult((short)(e.isAccessGranted() ? 1 : 0));
-		auditEvent.setClientIP(getRemoteIp());
-		auditEvent.setEventTime(getUTCDate());
-		auditEvent.setAclEnforcer(e.getModuleName());
-		auditEvent.setRepositoryType(EnumRepositoryType.HDFS);
-		auditEvent.setRepositoryName(repositoryName);
-		auditEvent.setResultReason(pathValidated);
-
-		/*
-		 * Review following audit fields for appropriate values
-		 *
-		auditEvent.setAgentId();
-		auditEvent.setPolicyId();
-		auditEvent.setSessionId();
-		auditEvent.setClientType();
-		 *
-		 */
-
-		try {
-			if (LOG.isDebugEnabled()) {
-				LOG.debug("Audit log of auditEvent: [" + auditEvent.toString() + "] - START.");
-			}
-			AuditProviderFactory.getAuditProvider().log(auditEvent);
-			if (LOG.isDebugEnabled()) {
-				LOG.debug("Audit log of auditEvent: [" + auditEvent.toString() + "] - END.");
-			}
-		}
-		catch(Throwable t) {
-			LOG.error("ERROR during audit log of auditEvent: [" + auditEvent.toString() + "]", t);
+		super.init(policyEngine);
+	}
+}
+
+class RangerHdfsResource implements RangerResource {
+	private String path = null;
+	private String owner = null;
+
+	public RangerHdfsResource(String path, String owner) {
+		this.path = path;
+		this.owner = owner;
+	}
+
+	@Override
+	public String getOwnerUser() {
+		return owner;
+	}
+
+	@Override
+	public boolean exists(String name) {
+		return StringUtils.equalsIgnoreCase(name, "path");
+	}
+
+	@Override
+	public String getValue(String name) {
+		if(StringUtils.equalsIgnoreCase(name, "path")) {
+			return path;
 		}
+
+		return null;
+	}
+}
+
+class RangerHdfsAccessRequest extends RangerAccessRequestImpl {
+	private static Map<FsAction, Set<String>> access2ActionListMapper = null ;
+
+	static {
+		access2ActionListMapper = new HashMap<FsAction, Set<String>>();
+
+		access2ActionListMapper.put(FsAction.NONE, new HashSet<String>());
+		access2ActionListMapper.put(FsAction.ALL, new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.READ, new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.READ_WRITE, new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, WRITE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.READ_EXECUTE, new HashSet<String>(Arrays.asList(READ_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.WRITE, new HashSet<String>(Arrays.asList(WRITE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.WRITE_EXECUTE, new HashSet<String>(Arrays.asList(WRITE_ACCCESS_TYPE, EXECUTE_ACCCESS_TYPE)));
+		access2ActionListMapper.put(FsAction.EXECUTE, new HashSet<String>(Arrays.asList(EXECUTE_ACCCESS_TYPE)));
+	}
+
+	public RangerHdfsAccessRequest(String path, String pathOwner, FsAction access, String user, Set<String> groups) {
+		super.setResource(new RangerHdfsResource(path, pathOwner));
+		super.setAccessTypes(access2ActionListMapper.get(access));
+		super.setUser(user);
+		super.setUserGroups(groups);
+		super.setAccessTime(getUTCDate());
+		super.setClientIPAddress(getRemoteIp());
+		super.setAction(access.toString());
+	}
+
+	private static Date getUTCDate() {
+		Calendar local=Calendar.getInstance();
+		int offset = local.getTimeZone().getOffset(local.getTimeInMillis());
+		GregorianCalendar utc = new GregorianCalendar(TimeZone.getTimeZone("GMT+0"));
+		utc.setTimeInMillis(local.getTimeInMillis());
+		utc.add(Calendar.MILLISECOND, -offset);
+		return utc.getTime();
+	}
 
 	private static String getRemoteIp() {
 		String ret = null ;
@@ -264,54 +237,95 @@ public class RangerFSPermissionChecker {
 		}
 		return ret ;
 	}
-
-
-	public static Date getUTCDate() {
-		Calendar local=Calendar.getInstance();
-		int offset = local.getTimeZone().getOffset(local.getTimeInMillis());
-		GregorianCalendar utc = new GregorianCalendar(TimeZone.getTimeZone("GMT+0"));
-		utc.setTimeInMillis(local.getTimeInMillis());
-		utc.add(Calendar.MILLISECOND, -offset);
-		return utc.getTime();
-	}
-
 }
 
-class LogEventInfo {
-	String moduleName ;
-	String userName ;
-	String path ;
-	FsAction access ;
-	boolean accessGranted ;
-
-	LogEventInfo(String moduleName, String username, String path, FsAction access, boolean accessGranted) {
-		this.moduleName = moduleName ;
-		this.userName = username ;
-		this.path = path ;
-		this.access = access ;
-		this.accessGranted = accessGranted;
-	}
+class RangerHdfsAuditHandler extends RangerDefaultAuditHandler {
+	private static final Log LOG = LogFactory.getLog(RangerHdfsAuditHandler.class);
+
+	private String pathToBeValidated = null;
+	private boolean isAuditEnabled = false;
+	private AuthzAuditEvent auditEvent = null;
+
+	private static final String RangerModuleName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_RANGER_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_RANGER_MODULE_ACL_NAME) ;
+	private static final String HadoopModuleName = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HADOOP_MODULE_ACL_NAME_PROP , RangerHadoopConstants.DEFAULT_HADOOP_MODULE_ACL_NAME) ;
+	private static final String excludeUserList = RangerConfiguration.getInstance().get(RangerHadoopConstants.AUDITLOG_HDFS_EXCLUDE_LIST_PROP, RangerHadoopConstants.AUDITLOG_EMPTY_STRING) ;
+	private static HashSet<String> excludeUsers = null ;
 
-	public String getModuleName() {
-		return moduleName;
+	static {
+		if (excludeUserList != null && excludeUserList.trim().length() > 0) {
+			excludeUsers = new HashSet<String>() ;
+			for(String excludeUser : excludeUserList.trim().split(",")) {
+				excludeUser = excludeUser.trim() ;
+				if (LOG.isDebugEnabled()) {
+					LOG.debug("Adding exclude user [" + excludeUser + "]");
+				}
+				excludeUsers.add(excludeUser) ;
+			}
+		}
+
+		RangerConfiguration.getInstance().initAudit("hdfs");
 	}
 
-	public String getUserName() {
-		return userName;
+	public RangerHdfsAuditHandler(String pathToBeValidated) {
+		this.pathToBeValidated = pathToBeValidated;
+
+		auditEvent = new AuthzAuditEvent();
 	}
 
-	public String getPath() {
-		return path;
+	@Override
+	public void logAudit(RangerAccessResult result) {
+		if(! isAuditEnabled) {
+			for(Map.Entry<String, RangerAccessResult.ResultDetail> e : result.getAccessTypeResults().entrySet()) {
+				RangerAccessResult.ResultDetail resDetail = e.getValue();
+
+				if(resDetail.isAudited()) {
+					isAuditEnabled = true;
+
+					break;
+				}
+			}
+		}
+
+		RangerAccessRequest request = result.getAccessRequest();
+		RangerServiceDef serviceDef = result.getServiceDef();
+		int serviceType = (serviceDef != null && serviceDef.getId() != null) ? serviceDef.getId().intValue() : -1;
+		String serviceName = result.getServiceName();
+		String resourceType = getResourceName(request.getResource(), serviceDef);
+		String resourcePath = getResourceValueAsString(request.getResource(), serviceDef);
+		Long policyId = (result.getAccessTypeResults() != null && result.getAccessTypeResults().size() > 0) ? result.getAccessTypeResults().get(0).getPolicyId() : null;
+
+		auditEvent.setUser(request.getUser());
+		auditEvent.setResourcePath(pathToBeValidated);
+		auditEvent.setResourceType(resourceType) ;
+		auditEvent.setAccessType(request.getAction());
+		auditEvent.setAccessResult((short)(result.getResult() == RangerAccessResult.Result.ALLOWED ? 1 : 0));
+		auditEvent.setClientIP(request.getClientIPAddress());
+		auditEvent.setEventTime(request.getAccessTime());
+		auditEvent.setAclEnforcer(RangerModuleName);
+		auditEvent.setPolicyId(policyId != null ? policyId.longValue() : -1);
+		auditEvent.setRepositoryType(serviceType);
+		auditEvent.setRepositoryName(serviceName);
+		auditEvent.setResultReason(resourcePath);
 	}
 
-	public FsAction getAccess() {
-		return access;
+	public void logHadoopEvent(String path, boolean accessGranted) {
+		auditEvent.setResultReason(path);
+		auditEvent.setAccessResult((short) (accessGranted ? 1 : 0));
+		auditEvent.setAclEnforcer(HadoopModuleName);
+		auditEvent.setPolicyId(0);
 	}
 
-	public boolean isAccessGranted() {
-		return accessGranted;
+	public void flushAudit() {
+		String username = auditEvent.getUser();
+
+		boolean skipLog = (username != null && excludeUsers != null && excludeUsers.contains(username)) ;
+
+		if (skipLog) {
+			return ;
+		}
+
+		if(isAuditEnabled) {
+			super.logAuthzAudit(auditEvent);
+		}
 	}
-
-
 }
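
[Audit state is now carried in a ThreadLocal that lives for exactly one
permission check. Together with the HadoopAuthClassTransformer change below,
each NameNode checkPermission() call effectively runs the sequence sketched
here; this is a paraphrase of the injected behavior, not the generated
bytecode:

    RangerFSPermissionChecker.checkPermissionPre(path);      // installs a fresh RangerHdfsAuditHandler
    try {
        // injected at the start of the inner check method:
        if (RangerFSPermissionChecker.check(ugi, inode, access)) {
            return;                                          // allowed by a Ranger policy
        }
        // otherwise the built-in HDFS check runs; its outcome is recorded
        // via RangerFSPermissionChecker.logHadoopEvent(inode, accessGranted)
    } finally {
        RangerFSPermissionChecker.checkPermissionPost(path); // flushAudit(), then clears the ThreadLocal
    }
]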

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
----------------------------------------------------------------------
diff --git a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
index a4c1d45..35d3981 100644
--- a/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
+++ b/hdfs-agent/src/main/java/org/apache/ranger/authorization/hadoop/agent/HadoopAuthClassTransformer.java
@@ -110,17 +110,15 @@ public class HadoopAuthClassTransformer implements ClassFileTransformer {
 
 				if (checkMethod != null) {
 					if (snapShotClass == null && (!withIntParamInMiddle)) {
-						checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$2,true) ;");
-						CtClass throwable = ClassPool.getDefault().get("java.lang.Throwable");
-						checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$2,false) ; throw $e; }", throwable);
 						checkMethod.insertBefore("{ if ( org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.check(ugi,$1,$2) ) { return ; } }");
 					}
 					else {
-						checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$3,true) ;");
-						CtClass throwable = ClassPool.getDefault().get("java.lang.Throwable");
-						checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent(ugi,$1,$3,false) ; throw $e; }", throwable);
 						checkMethod.insertBefore("{ if ( org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.check(ugi,$1,$3) ) { return ; } }");
 					}
+					checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent($1,true) ;");
+					CtClass throwable = ClassPool.getDefault().get("java.lang.Throwable");
+					checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.logHadoopEvent($1,false) ; throw $e; }", throwable);
+
 					System.out.println("Injection of code is successfull ....");
 				}
 				else {
@@ -144,9 +142,9 @@ public class HadoopAuthClassTransformer implements ClassFileTransformer {
 
 			if (checkMethod != null) {
 				checkMethod.insertBefore("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.checkPermissionPre($1) ;");
-				checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.writeLog($1) ;");
+				checkMethod.insertAfter("org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.checkPermissionPost($1) ;");
 				CtClass throwable = ClassPool.getDefault().get("org.apache.hadoop.security.AccessControlException");
-				checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.writeLog($1); throw $e; }", throwable);
+				checkMethod.addCatch("{ org.apache.hadoop.hdfs.server.namenode.RangerFSPermissionChecker.checkPermissionPost($1); throw $e; }", throwable);
 				injected_cm = true ;
 			}
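
[The RangerBasePlugin change below parameterizes the plugin with a serviceType
and, in passing, fixes an operator-precedence bug in deriving the service name
from the policy download URL: the old expression concatenated the integer 1
onto the substring instead of advancing the start index. A worked example with
a hypothetical URL:

    String policyDownloadUrl = "http://ranger-admin:6080/service/assets/policyList/hdfsdev";
    int idx = policyDownloadUrl.lastIndexOf('/');

    String broken = policyDownloadUrl.substring(idx) + 1;   // "/hdfsdev1"
    String fixed  = policyDownloadUrl.substring(idx + 1);   // "hdfsdev"
]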

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/plugin-common/src/main/java/org/apache/ranger/plugin/service/RangerBasePlugin.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/service/RangerBasePlugin.java b/plugin-common/src/main/java/org/apache/ranger/plugin/service/RangerBasePlugin.java
index 16e3cac..dae02fc 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/service/RangerBasePlugin.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/service/RangerBasePlugin.java
@@ -28,9 +28,22 @@ import org.apache.ranger.plugin.util.PolicyRefresher;
 
 
 public class RangerBasePlugin {
-	private boolean initDone = false;
-	private PolicyRefresher refresher = null;
+	private boolean initDone = false;
+	private String serviceType = null;
+	private PolicyRefresher refresher = null;
+
+	public RangerBasePlugin(String serviceType) {
+		this.serviceType = serviceType;
+	}
+
+	public RangerPolicyEngine getPolicyEngine() {
+		return refresher == null ? null : refresher.getPolicyEngine();
+	}
+
+	public String getServiceName() {
+		return refresher == null ? null : refresher.getServiceName();
+	}
 
 	public boolean init(RangerPolicyEngine policyEngine) {
 		if(!initDone) {
@@ -39,18 +52,18 @@ public class RangerBasePlugin {
 			String serviceName = null;
 
 			// get the serviceName from download URL: http://ranger-admin-host:port/service/assets/policyList/serviceName
-			String policyDownloadUrl = RangerConfiguration.getInstance().get("xasecure.hdfs.policymgr.url");
+			String policyDownloadUrl = RangerConfiguration.getInstance().get("xasecure." + serviceType + ".policymgr.url");
 
 			if(! StringUtils.isEmpty(policyDownloadUrl)) {
 				int idx = policyDownloadUrl.lastIndexOf('/');
 
 				if(idx != -1) {
-					serviceName = policyDownloadUrl.substring(idx) + 1;
+					serviceName = policyDownloadUrl.substring(idx + 1);
 				}
 			}
 
 			if(StringUtils.isEmpty(serviceName)) {
-				serviceName = RangerConfiguration.getInstance().get("ranger.plugin.service.name", "hbasedev");
+				serviceName = RangerConfiguration.getInstance().get("ranger.plugin." + serviceType + ".service.name");
 			}
 
 			ServiceStore serviceStore = ServiceStoreFactory.instance().getServiceStore();


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/BaseFileStore.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/BaseFileStore.java b/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/BaseFileStore.java
index ea22745..9493e16 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/BaseFileStore.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/BaseFileStore.java
@@ -59,7 +59,7 @@ public class BaseFileStore {
 
 	protected void init() {
-		dataDir = RangerConfiguration.getInstance().get("ranger.policystore.file.dir", "/etc/ranger/data");
+		dataDir = RangerConfiguration.getInstance().get("ranger.policystore.file.dir", "file:///etc/ranger/data");
 
 		try {
 			gsonBuilder = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create();


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/ServiceFileStore.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/ServiceFileStore.java b/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/ServiceFileStore.java
index fb24393..276c87e 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/ServiceFileStore.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/store/file/ServiceFileStore.java
@@ -756,7 +756,7 @@ public class ServiceFileStore extends BaseFileStore implements ServiceStore {
 		RangerService service = getServiceByName(serviceName);
 
 		if(service == null) {
-			throw new Exception("service does not exist - name='" + serviceName);
+			throw new Exception("service does not exist - name=" + serviceName);
 		}
 
 		RangerServiceDef serviceDef = findServiceDefByName(service.getType());


http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/plugin-common/src/main/java/org/apache/ranger/plugin/util/PolicyRefresher.java
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/java/org/apache/ranger/plugin/util/PolicyRefresher.java b/plugin-common/src/main/java/org/apache/ranger/plugin/util/PolicyRefresher.java
index e2eb69e..575798f 100644
--- a/plugin-common/src/main/java/org/apache/ranger/plugin/util/PolicyRefresher.java
+++ b/plugin-common/src/main/java/org/apache/ranger/plugin/util/PolicyRefresher.java
@@ -53,7 +53,21 @@ public class PolicyRefresher extends Thread {
 			LOG.debug("<== PolicyRefresher.PolicyRefresher(serviceName=" + serviceName + ")");
 		}
 	}
-	
+
+	/**
+	 * @return the policyEngine
+	 */
+	public RangerPolicyEngine getPolicyEngine() {
+		return policyEngine;
+	}
+
+	/**
+	 * @return the serviceName
+	 */
+	public String getServiceName() {
+		return serviceName;
+	}
+
 	/**
 	 * @return the pollingIntervalMilliSeconds
 	 */
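
[The BaseFileStore change above switches the default store location from a
bare path to a file: URI, presumably so the directory always resolves to the
local filesystem rather than to whatever fs.defaultFS points at when the path
is read through the Hadoop FileSystem API. A sketch of the distinction,
assuming standard Hadoop file-system resolution:

    Configuration conf = new Configuration();   // fs.defaultFS may be hdfs://...

    // an unqualified path resolves against fs.defaultFS:
    FileSystem fs1 = new Path("/etc/ranger/data").getFileSystem(conf);

    // a file: URI always resolves to the local filesystem:
    FileSystem fs2 = new Path("file:///etc/ranger/data").getFileSystem(conf);
]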

http://git-wip-us.apache.org/repos/asf/incubator-ranger/blob/6a803eaa/plugin-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
----------------------------------------------------------------------
diff --git a/plugin-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json b/plugin-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
index adf4a29..ca5ced8 100644
--- a/plugin-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
+++ b/plugin-common/src/main/resources/service-defs/ranger-servicedef-hdfs.json
@@ -34,6 +34,7 @@
 	[
 		{"name":"username","type":"string","mandatory":true,"label":"Username"},
 		{"name":"password","type":"password","mandatory":true,"label":"Password"},
+		{"name":"fs.default.name","type":"string","mandatory":true,"label":"Namenode URL"},
 		{"name":"hadoop.security.authorization","type":"bool","subType":"TrueFalse","mandatory":true,"defaultValue":"false"},
 		{"name":"hadoop.security.authentication","type":"enum","subType":"authnType","mandatory":true,"defaultValue":"simple"},
 		{"name":"hadoop.security.auth_to_local","type":"string","mandatory":false},
