This is an automated email from the ASF dual-hosted git repository.

tdsilva pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix.git


The following commit(s) were added to refs/heads/master by this push:
     new 1f2508d  PHOENIX-5269 use AccessChecker to check for user permissions
1f2508d is described below

commit 1f2508dbde365aaedac628c89df237e8b6b46df8
Author: Kiran Kumar Maturi <maturi.ki...@gmail.com>
AuthorDate: Mon Jun 17 16:42:49 2019 +0530

    PHOENIX-5269 use AccessChecker to check for user permissions
---
 .../apache/phoenix/end2end/PermissionsCacheIT.java | 107 +++++++++++++++++++++
 .../phoenix/coprocessor/MetaDataEndpointImpl.java  |   2 +
 .../coprocessor/PhoenixAccessController.java       |  77 +++++++++++++--
 .../PhoenixMetaDataCoprocessorHost.java            |   5 +
 4 files changed, 185 insertions(+), 6 deletions(-)

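The gist of the change: instead of relying only on UserPermission lists fetched over RPC for every check, PhoenixAccessController now also consults HBase's AccessChecker, whose TableAuthManager keeps a ZooKeeper-backed cache of the ACL table. A minimal sketch of that pattern is below; the class name CachedPermissionCheck is hypothetical, while AccessChecker, TableAuthManager, userHasAccess and groupHasAccess are the HBase APIs used in the diff that follows.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.security.User;
    import org.apache.hadoop.hbase.security.access.AccessChecker;
    import org.apache.hadoop.hbase.security.access.Permission.Action;
    import org.apache.hadoop.hbase.security.access.TableAuthManager;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

    // Hypothetical helper sketching the cached-permission check this commit adds.
    public class CachedPermissionCheck {

        private final AccessChecker accessChecker;

        public CachedPermissionCheck(Configuration conf, ZKWatcher zk) {
            // AccessChecker wires up a TableAuthManager that mirrors the HBase ACL znode.
            this.accessChecker = new AccessChecker(conf, zk);
        }

        // True if the user, or one of the user's groups, holds 'action' on 'table'.
        public boolean hasAccess(User user, TableName table, Action action) {
            if (accessChecker.getAuthManager().userHasAccess(user, table, action)) {
                return true;
            }
            String[] groups = user.getGroupNames();
            if (groups != null) {
                for (String group : groups) {
                    if (accessChecker.getAuthManager().groupHasAccess(group, table, action)) {
                        return true;
                    }
                }
            }
            return false;
        }

        // Mirrors the new stop() hook in the patch: release the shared AuthManager reference.
        public void close() {
            if (accessChecker.getAuthManager() != null) {
                TableAuthManager.release(accessChecker.getAuthManager());
            }
        }
    }
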
diff --git a/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionsCacheIT.java b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionsCacheIT.java
new file mode 100644
index 0000000..8d0c694
--- /dev/null
+++ b/phoenix-core/src/it/java/org/apache/phoenix/end2end/PermissionsCacheIT.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.end2end;
+
+import static org.junit.Assert.assertTrue;
+
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.AuthUtil;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.security.access.AccessControlLists;
+import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.security.access.TablePermission;
+import org.apache.hadoop.hbase.zookeeper.ZKUtil;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
+import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
+import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
+import org.apache.phoenix.util.SchemaUtil;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class PermissionsCacheIT extends BasePermissionsIT {
+
+    public PermissionsCacheIT() throws Exception {
+        super(true);
+    }
+
+    @BeforeClass
+    public static void doSetup() throws Exception {
+        BasePermissionsIT.initCluster(true);
+    }
+
+    @Test
+    public void testPermissionsCachedWithAccessChecker() throws Throwable {
+        if (!isNamespaceMapped) {
+            return;
+        }
+        final String schema = generateUniqueName();
+        final String tableName = generateUniqueName();
+        final String phoenixTableName = SchemaUtil.getTableName(schema, tableName);
+        try (Connection conn = getConnection()) {
+            grantPermissions(regularUser1.getShortName(), PHOENIX_NAMESPACE_MAPPED_SYSTEM_TABLES,
+                Action.READ, Action.EXEC);
+            grantPermissions(regularUser1.getShortName(), Collections.singleton("SYSTEM:SEQUENCE"),
+                Action.WRITE, Action.READ, Action.EXEC);
+            superUser1.runAs(new PrivilegedExceptionAction<Void>() {
+                @Override
+                public Void run() throws Exception {
+                    try {
+                        verifyAllowed(createSchema(schema), superUser1);
+                        grantPermissions(regularUser1.getShortName(), schema, Action.CREATE);
+                        grantPermissions(AuthUtil.toGroupEntry(GROUP_SYSTEM_ACCESS), schema,
+                            Action.CREATE);
+                    } catch (Throwable e) {
+                        if (e instanceof Exception) {
+                            throw (Exception) e;
+                        } else {
+                            throw new Exception(e);
+                        }
+                    }
+                    return null;
+                }
+            });
+            verifyAllowed(createTable(phoenixTableName), regularUser1);
+            HBaseTestingUtility utility = getUtility();
+            Configuration conf = utility.getConfiguration();
+            ZKWatcher zkw = HBaseTestingUtility.getZooKeeperWatcher(utility);
+            String aclZnodeParent = conf.get("zookeeper.znode.acl.parent", "acl");
+            String aclZNode = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, aclZnodeParent);
+            String tableZNode = ZNodePaths.joinZNode(aclZNode, "@" + schema);
+            byte[] data = ZKUtil.getData(zkw, tableZNode);
+            ListMultimap<String, TablePermission> userPermissions =
+                    AccessControlLists.readPermissions(data, conf);
+            assertTrue("User permissions not found in cache:",
+                userPermissions.containsKey(regularUser1.getName()));
+            List<TablePermission> tablePermissions = userPermissions.get(regularUser1.getName());
+            for (TablePermission tablePerm : tablePermissions) {
+                assertTrue("Table create permission doesn't exist", tablePerm.implies(Action.CREATE));
+            }
+        } catch (Exception e) {
+            System.out.println("Exception occurred: " + e);
+            throw e;
+        } finally {
+            revokeAll();
+        }
+    }
+
+}
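
For reference, the assertions above hold because the grants land in the ZooKeeper ACL znode that TableAuthManager watches. A hedged sketch of reading that cache directly, using the same calls as the test (the class and method names here are hypothetical; namespace grants are keyed as "@<namespace>", table grants by table name):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.security.access.AccessControlLists;
    import org.apache.hadoop.hbase.security.access.TablePermission;
    import org.apache.hadoop.hbase.zookeeper.ZKUtil;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
    import org.apache.hadoop.hbase.zookeeper.ZNodePaths;
    import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;

    // Hypothetical helper: dump the cached grants for one namespace from the ACL znode.
    public final class AclZNodeReader {
        public static ListMultimap<String, TablePermission> readNamespaceAcl(
                ZKWatcher zkw, Configuration conf, String namespace) throws Exception {
            String aclParent = conf.get("zookeeper.znode.acl.parent", "acl");
            String aclZNode = ZNodePaths.joinZNode(zkw.znodePaths.baseZNode, aclParent);
            // Namespace grants are cached under "@<namespace>"; table grants under the table name.
            String nsZNode = ZNodePaths.joinZNode(aclZNode, "@" + namespace);
            byte[] data = ZKUtil.getData(zkw, nsZNode);
            return AccessControlLists.readPermissions(data, conf);
        }
    }
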
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
index a3d44ba..cf8217a 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/MetaDataEndpointImpl.java
@@ -126,6 +126,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
+import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
@@ -300,6 +301,7 @@ import com.google.protobuf.Service;
  * @since 0.1
  */
 @SuppressWarnings("deprecation")
+@CoreCoprocessor
 public class MetaDataEndpointImpl extends MetaDataProtocol implements RegionCoprocessor {
     private static final Logger LOGGER = LoggerFactory.getLogger(MetaDataEndpointImpl.class);
 
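The only functional change to MetaDataEndpointImpl is the @CoreCoprocessor annotation. In HBase 2.x, a coprocessor carrying that annotation is handed a richer environment that also implements HasRegionServerServices, which is what lets the access controller below borrow the region server's ZKWatcher. A minimal sketch of that hand-off, with a hypothetical coprocessor name:

    import java.io.IOException;

    import org.apache.hadoop.hbase.CoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor;
    import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
    import org.apache.hadoop.hbase.zookeeper.ZKWatcher;

    @CoreCoprocessor // without this annotation the instanceof check below stays false
    public class ZkAwareCoprocessor implements RegionCoprocessor {

        private ZKWatcher zk;

        @Override
        public void start(CoprocessorEnvironment env) throws IOException {
            if (env instanceof HasRegionServerServices) {
                // Only environments created for core coprocessors expose RegionServerServices.
                zk = ((HasRegionServerServices) env).getRegionServerServices().getZooKeeper();
            }
        }
    }
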
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
index 997f5a0..aec28de 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixAccessController.java
@@ -40,11 +40,13 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.HasRegionServerServices;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.ObserverContextImpl;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.RpcServer;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
@@ -53,13 +55,16 @@ import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
+import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.AccessControlClient;
 import org.apache.hadoop.hbase.security.access.AccessControlUtil;
 import org.apache.hadoop.hbase.security.access.AuthResult;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
+import org.apache.hadoop.hbase.security.access.TableAuthManager;
 import org.apache.hadoop.hbase.security.access.UserPermission;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
 import org.apache.phoenix.coprocessor.PhoenixMetaDataCoprocessorHost.PhoenixMetaDataControllerEnvironment;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
@@ -79,8 +84,10 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
 
     private PhoenixMetaDataControllerEnvironment env;
     AtomicReference<ArrayList<MasterObserver>> accessControllers = new AtomicReference<>();
+    private boolean hbaseAccessControllerEnabled;
     private boolean accessCheckEnabled;
     private UserProvider userProvider;
+    private AccessChecker accessChecker;
     public static final Logger LOGGER = LoggerFactory.getLogger(PhoenixAccessController.class);
     private static final Logger AUDITLOG =
             LoggerFactory.getLogger("SecurityLogger."+PhoenixAccessController.class.getName());
@@ -98,6 +105,9 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
             for (RegionCoprocessor cp : cpHost.findCoprocessors(RegionCoprocessor.class)) {
                 if (cp instanceof AccessControlService.Interface && cp instanceof MasterObserver) {
                     oldAccessControllers.add((MasterObserver)cp);
+                    if(cp.getClass().getName().equals(org.apache.hadoop.hbase.security.access.AccessController.class.getName())) {
+                        hbaseAccessControllerEnabled = true;
+                    }
                 }
             }
             accessControllers.set(oldAccessControllers);
@@ -130,6 +140,13 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
             throw new IllegalArgumentException(
                     "Not a valid environment, should be loaded by PhoenixMetaDataControllerEnvironment");
         }
+
+        ZKWatcher zk = null;
+        RegionCoprocessorEnvironment regionEnv = this.env.getRegionCoprocessorEnvironment();
+        if (regionEnv instanceof HasRegionServerServices) {
+            zk = ((HasRegionServerServices) regionEnv).getRegionServerServices().getZooKeeper();
+        }
+        accessChecker = new AccessChecker(env.getConfiguration(), zk);
         // set the user-provider.
         this.userProvider = UserProvider.instantiate(env.getConfiguration());
         // init superusers and add the server principal (if using security)
@@ -138,7 +155,11 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
     }
 
     @Override
-    public void stop(CoprocessorEnvironment env) throws IOException {}
+    public void stop(CoprocessorEnvironment env) throws IOException {
+        if(accessChecker.getAuthManager() != null) {
+            TableAuthManager.release(accessChecker.getAuthManager());
+        }
+    }
 
     @Override
     public void preCreateTable(ObserverContext<PhoenixMetaDataControllerEnvironment> ctx, String tenantId,
@@ -412,7 +433,8 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
       * @throws IOException
       */
      private List<UserPermission> getUserPermissions(final TableName tableName) throws IOException {
-        return User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
+         List<UserPermission> userPermissions =
+                 User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
             @Override
             public List<UserPermission> run() throws Exception {
                 final List<UserPermission> userPermissions = new ArrayList<UserPermission>();
@@ -424,8 +446,6 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
                             userPermissions.addAll(AccessControlClient.getUserPermissions(connection, tableName.getNameAsString()));
                             userPermissions.addAll(AccessControlClient.getUserPermissions(
                                      connection, AuthUtil.toGroupEntry(tableName.getNamespaceAsString())));
-                        } else {
-                            getUserPermsFromUserDefinedAccessController(userPermissions, connection, (AccessControlService.Interface) service);
                         }
                     }
                 } catch (Throwable e) {
@@ -438,7 +458,39 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
                 }
                 return userPermissions;
             }
-
+         });
+         getUserDefinedPermissions(tableName, userPermissions);
+         return userPermissions;
+       }
+
+     private void getUserDefinedPermissions(final TableName tableName,
+             final List<UserPermission> userPermissions) throws IOException {
+          User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
+              @Override
+              public List<UserPermission> run() throws Exception {
+                  final List<UserPermission> userPermissions = new ArrayList<UserPermission>();
+                 try (Connection connection =
+                         ConnectionFactory.createConnection(((CoprocessorEnvironment) env).getConfiguration())) {
+                      for (MasterObserver service : getAccessControllers()) {
+                         if (service.getClass().getName().equals(
+                             org.apache.hadoop.hbase.security.access.AccessController.class
+                                     .getName())) {
+                              continue;
+                           } else {
+                             getUserPermsFromUserDefinedAccessController(userPermissions, connection,
+                                 (AccessControlService.Interface) service);
+                           }
+                      }
+                  } catch (Throwable e) {
+                      if (e instanceof Exception) {
+                          throw (Exception) e;
+                      } else if (e instanceof Error) {
+                          throw (Error) e;
+                      }
+                      throw new Exception(e);
+                  }
+                  return userPermissions;
+              }
            private void getUserPermsFromUserDefinedAccessController(final List<UserPermission> userPermissions, Connection connection, AccessControlService.Interface service) {
 
                RpcController controller = (RpcController) ((ClusterConnection)connection)
@@ -491,6 +543,10 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
         User user = getActiveUser();
         AuthResult result = null;
         List<Action> requiredAccess = new ArrayList<Action>();
+        List<UserPermission> userPermissions = new ArrayList<>();
+        if(permissions.length > 0) {
+           getUserDefinedPermissions(tableName, userPermissions);
+        }
         for (Action permission : permissions) {
              if (hasAccess(getUserPermissions(tableName), tableName, permission, user)) {
                 result = AuthResult.allow(request, "Table permission granted", user, permission, tableName, null, null);
@@ -520,6 +576,10 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
             return true;
         }
         if (perms != null) {
+            if (hbaseAccessControllerEnabled
+                    && accessChecker.getAuthManager().userHasAccess(user, table, action)) {
+                return true;
+            }
             List<UserPermission> permissionsForUser = getPermissionForUser(perms, user.getShortName().getBytes());
             if (permissionsForUser != null) {
                 for (UserPermission permissionForUser : permissionsForUser) {
@@ -529,7 +589,12 @@ public class PhoenixAccessController extends BaseMetaDataEndpointObserver {
             String[] groupNames = user.getGroupNames();
             if (groupNames != null) {
               for (String group : groupNames) {
-                List<UserPermission> groupPerms = getPermissionForUser(perms,(AuthUtil.toGroupEntry(group)).getBytes());
+                    List<UserPermission> groupPerms =
+                            getPermissionForUser(perms, (AuthUtil.toGroupEntry(group)).getBytes());
+                    if (hbaseAccessControllerEnabled && accessChecker.getAuthManager()
+                            .groupHasAccess(group, table, action)) {
+                        return true;
+                    }
                 if (groupPerms != null) for (UserPermission permissionForUser : groupPerms) {
                     if (permissionForUser.implies(action)) { return true; }
                 }
diff --git a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixMetaDataCoprocessorHost.java b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixMetaDataCoprocessorHost.java
index 9bb71c3..72b5e01 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixMetaDataCoprocessorHost.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/PhoenixMetaDataCoprocessorHost.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
+import org.apache.hadoop.hbase.regionserver.RegionServerServices;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.query.QueryServicesOptions;
@@ -146,6 +147,10 @@ public class PhoenixMetaDataCoprocessorHost
         public RegionCoprocessorHost getCoprocessorHost() {
             return ((HRegion)env.getRegion()).getCoprocessorHost();
         }
+
+        public RegionCoprocessorEnvironment getRegionCoprocessorEnvironment() {
+            return env;
+        }
     }
     
 
