Repository: hive
Updated Branches:
  refs/heads/master 001ab47fe -> d2838990f


HIVE-18841 : Support authorization of UDF usage in hive (Thejas Nair, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d2838990
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d2838990
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d2838990

Branch: refs/heads/master
Commit: d2838990f8e8211c2a08b8983bb889da8ac9b76e
Parents: 001ab47
Author: Thejas M Nair <the...@hortonworks.com>
Authored: Wed Apr 4 12:02:24 2018 -0700
Committer: Thejas M Nair <the...@hortonworks.com>
Committed: Wed Apr 4 12:02:24 2018 -0700

----------------------------------------------------------------------
 .../TestHiveAuthorizerCheckInvocation.java      | 101 +++++++++++++++++--
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  61 +++++++++--
 .../apache/hadoop/hive/ql/exec/Registry.java    |  17 +++-
 .../apache/hadoop/hive/ql/hooks/ReadEntity.java |  14 ++-
 .../plugin/sqlstd/Operation2Privilege.java      |   2 +
 .../SQLStdHiveAuthorizationValidator.java       |   8 +-
 .../hadoop/hive/ql/session/SessionState.java    |  12 +++
 .../authorization_create_func1.q.out            |   2 +-
 .../authorization_create_func2.q.out            |   2 +-
 9 files changed, 190 insertions(+), 29 deletions(-)
----------------------------------------------------------------------
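
In outline, the patch threads function usage from query compilation into
authorization: the function Registry records every function a query resolves
into the SessionState, and the Driver then adds the PERSISTENT ones to the
authorization inputs. A condensed sketch of that flow, paraphrased from the
hunks below (not a drop-in snippet):

    // 1. Driver.compile() clears the per-query record before analysis:
    SessionState.get().getCurrentFunctionsInUse().clear();

    // 2. Registry.getFunctionInfo() notes each function the analyzer resolves:
    SessionState.get().getCurrentFunctionsInUse().put(functionName, functionInfo);

    // 3. Driver.doAuthorization() turns the permanent UDFs into read entities:
    for (Entry<String, FunctionInfo> f : ss.getCurrentFunctionsInUse().entrySet()) {
      if (f.getValue().getFunctionType() == FunctionType.PERSISTENT) {
        inputList.add(createReadEntity(f.getKey(), f.getValue()));
      }
    }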


http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index bc2a34a..a3cdd6e 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.reset;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -33,12 +34,9 @@ import java.util.List;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.DriverFactory;
-import org.apache.hadoop.hive.ql.IDriver;
+import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
@@ -53,8 +51,8 @@ import org.mockito.ArgumentCaptor;
 import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
-
-import static org.mockito.Mockito.when;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Test HiveAuthorizer api invocation
@@ -62,7 +60,7 @@ import static org.mockito.Mockito.when;
 public class TestHiveAuthorizerCheckInvocation {
  private final Logger LOG = LoggerFactory.getLogger(this.getClass().getName());;
   protected static HiveConf conf;
-  protected static IDriver driver;
+  protected static Driver driver;
  private static final String tableName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
      + "Table";
  private static final String viewName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
@@ -102,7 +100,7 @@ public class TestHiveAuthorizerCheckInvocation {
     conf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
 
     SessionState.start(conf);
-    driver = DriverFactory.newDriver(conf);
+    driver = new Driver(conf);
     runCmd("create table " + tableName
         + " (i int, j int, k string) partitioned by (city string, `date` 
string) ");
     runCmd("create view " + viewName + " as select * from " + tableName);
@@ -125,7 +123,7 @@ public class TestHiveAuthorizerCheckInvocation {
     runCmd("drop table if exists " + tableName);
     runCmd("drop table if exists " + viewName);
     runCmd("drop table if exists " + fullInTableName);
-    runCmd("drop database if exists " + dbName );
+    runCmd("drop database if exists " + dbName + " CASCADE");
     driver.close();
   }
 
@@ -269,7 +267,7 @@ public class TestHiveAuthorizerCheckInvocation {
 
     HivePrivilegeObject funcObj;
     HivePrivilegeObject dbObj;
-    assertEquals("number of output object", 2, outputs.size());
+    assertEquals("number of output objects", 2, outputs.size());
     if(outputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
       funcObj = outputs.get(0);
       dbObj = outputs.get(1);
@@ -284,6 +282,89 @@ public class TestHiveAuthorizerCheckInvocation {
 
     assertEquals("input type", HivePrivilegeObjectType.DATABASE, 
dbObj.getType());
     assertTrue("db name", dbName.equalsIgnoreCase(dbObj.getDbname()));
+
+    // actually create the permanent function
+    CommandProcessorResponse cresponse = driver.run(null, true);
+    assertEquals(0, cresponse.getResponseCode());
+
+    // Verify privilege objects
+    reset(mockedAuthorizer);
+    status = driver.compile("select  " + dbName + "." + funcName + "() , i from " + tableName);
+    assertEquals(0, status);
+
+    List<HivePrivilegeObject> inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 2, inputs.size());
+    HivePrivilegeObject tableObj;
+    if (inputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
+      funcObj = inputs.get(0);
+      tableObj = inputs.get(1);
+    } else {
+      funcObj = inputs.get(1);
+      tableObj = inputs.get(0);
+    }
+
+    assertEquals("input type", HivePrivilegeObjectType.FUNCTION, 
funcObj.getType());
+    assertEquals("function name", funcName.toLowerCase(), 
funcObj.getObjectName().toLowerCase());
+    assertEquals("db name", dbName.toLowerCase(), 
funcObj.getDbname().toLowerCase());
+
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, 
tableObj.getType());
+    assertEquals("table name", tableName.toLowerCase(), 
tableObj.getObjectName().toLowerCase());
+
+    // create 2nd permanent function
+    String funcName2 = "funcName2";
+    cresponse = driver
+        .run("create function " + dbName + "." + funcName2 + " as 
'org.apache.hadoop.hive.ql.udf.UDFRand'");
+    assertEquals(0, cresponse.getResponseCode());
+
+    // try using the 2nd permanent function and verify it's only the 2nd one
+    // that shows up for auth
+    reset(mockedAuthorizer);
+    status = driver.compile("select  " + dbName + "." + funcName2 + "(i)  from " + tableName);
+    assertEquals(0, status);
+
+    inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 2, inputs.size());
+    if (inputs.get(0).getType() == HivePrivilegeObjectType.FUNCTION) {
+      funcObj = inputs.get(0);
+      tableObj = inputs.get(1);
+    } else {
+      funcObj = inputs.get(1);
+      tableObj = inputs.get(0);
+    }
+
+    assertEquals("input type", HivePrivilegeObjectType.FUNCTION, 
funcObj.getType());
+    assertEquals("function name", funcName2.toLowerCase(), 
funcObj.getObjectName().toLowerCase());
+    assertEquals("db name", dbName.toLowerCase(), 
funcObj.getDbname().toLowerCase());
+
+    assertEquals("input type", HivePrivilegeObjectType.TABLE_OR_VIEW, 
tableObj.getType());
+    assertEquals("table name", tableName.toLowerCase(), 
tableObj.getObjectName().toLowerCase());
+
+    // try using both permanent functions
+    reset(mockedAuthorizer);
+    status = driver.compile(
+        "select  " + dbName + "." + funcName2 + "(i), " + dbName + "." + 
funcName + "(), j  from " + tableName);
+    assertEquals(0, status);
+
+    inputs = getHivePrivilegeObjectInputs().getLeft();
+    assertEquals("number of input objects", 3, inputs.size());
+    boolean foundF1 = false;
+    boolean foundF2 = false;
+    boolean foundTable = false;
+    for (HivePrivilegeObject inp : inputs) {
+      if (inp.getType() == HivePrivilegeObjectType.FUNCTION) {
+        if (funcName.equalsIgnoreCase(inp.getObjectName())) {
+          foundF1 = true;
+        } else if (funcName2.equalsIgnoreCase(inp.getObjectName())) {
+          foundF2 = true;
+        }
+      } else if (inp.getType() == HivePrivilegeObjectType.TABLE_OR_VIEW
+          && tableName.equalsIgnoreCase(inp.getObjectName().toLowerCase())) {
+        foundTable = true;
+      }
+    }
+    assertTrue("Found " + funcName, foundF1);
+    assertTrue("Found " + funcName2, foundF2);
+    assertTrue("Found Table", foundTable);
   }
 
   @Test

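Note: the assertions above rely on the test's getHivePrivilegeObjectInputs()
helper, which is not part of this diff. A hypothetical sketch of such a helper,
using the Mockito ArgumentCaptor pattern this test class already imports; the
exact checkPrivileges() signature is assumed here and should be checked against
HiveAuthorizer:

    // Hypothetical helper sketch; the real implementation lives outside this diff.
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private Pair<List<HivePrivilegeObject>, List<HivePrivilegeObject>> getHivePrivilegeObjectInputs()
        throws Exception {
      ArgumentCaptor<List> inputsCapturer = ArgumentCaptor.forClass(List.class);
      ArgumentCaptor<List> outputsCapturer = ArgumentCaptor.forClass(List.class);
      // Capture the input/output privilege objects handed to the mocked authorizer.
      verify(mockedAuthorizer).checkPrivileges(any(HiveOperationType.class),
          inputsCapturer.capture(), outputsCapturer.capture(), any(HiveAuthzContext.class));
      return ImmutablePair.of((List<HivePrivilegeObject>) inputsCapturer.getValue(),
          (List<HivePrivilegeObject>) outputsCapturer.getValue());
    }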
http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index ed3984e..79db006 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -33,6 +33,7 @@ import java.util.LinkedHashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Queue;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -57,6 +58,7 @@ import org.apache.hadoop.hive.conf.HiveVariableSource;
 import org.apache.hadoop.hive.conf.VariableSubstitution;
 import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreUtils;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.cache.results.CacheUsage;
@@ -66,6 +68,9 @@ import org.apache.hadoop.hive.ql.exec.ConditionalTask;
 import org.apache.hadoop.hive.ql.exec.DagUtils;
 import org.apache.hadoop.hive.ql.exec.ExplainTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -75,6 +80,7 @@ import org.apache.hadoop.hive.ql.exec.TaskRunner;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.HookContext;
 import org.apache.hadoop.hive.ql.hooks.HookUtils;
 import org.apache.hadoop.hive.ql.hooks.PrivateHookContext;
@@ -599,7 +605,9 @@ public class Driver implements IDriver {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.PARSE);
 
       hookRunner.runBeforeCompileHook(command);
-
+      // clear CurrentFunctionsInUse set, to capture new set of functions
+      // that SemanticAnalyzer finds are in use
+      SessionState.get().getCurrentFunctionsInUse().clear();
       perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.ANALYZE);
 
       // Flush the metastore cache.  This assures that we don't pick up objects from a previous
@@ -932,12 +940,22 @@ public class Driver implements IDriver {
       // get mapping of tables to columns used
       ColumnAccessInfo colAccessInfo = sem.getColumnAccessInfo();
       // colAccessInfo is set only in case of SemanticAnalyzer
-      Map<String, List<String>> selectTab2Cols = colAccessInfo != null ? colAccessInfo
-          .getTableToColumnAccessMap() : null;
-      Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null ?
-          sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
-      doAuthorizationV2(ss, op, inputs, outputs, command, selectTab2Cols, updateTab2Cols);
-     return;
+      Map<String, List<String>> selectTab2Cols = colAccessInfo != null
+          ? colAccessInfo.getTableToColumnAccessMap() : null;
+      Map<String, List<String>> updateTab2Cols = sem.getUpdateColumnAccessInfo() != null
+          ? sem.getUpdateColumnAccessInfo().getTableToColumnAccessMap() : null;
+
+      // convert to Lists: the Sets above were created using Sets.union (for the
+      // reasons explained there), and that union view is immutable
+      List<ReadEntity> inputList = new ArrayList<ReadEntity>(inputs);
+      List<WriteEntity> outputList = new ArrayList<WriteEntity>(outputs);
+
+      // add permanent UDFs being used
+      inputList.addAll(getPermanentFunctionEntities(ss));
+
+      doAuthorizationV2(ss, op, inputList, outputList, command, selectTab2Cols, updateTab2Cols);
+      return;
+      return;
     }
     if (op == null) {
       throw new HiveException("Operation should not be null");
@@ -1077,6 +1095,29 @@ public class Driver implements IDriver {
     }
   }
 
+  private static List<ReadEntity> getPermanentFunctionEntities(SessionState ss) throws HiveException {
+    List<ReadEntity> functionEntities = new ArrayList<>();
+    for (Entry<String, FunctionInfo> permFunction : ss.getCurrentFunctionsInUse().entrySet()) {
+      if (permFunction.getValue().getFunctionType() != FunctionType.PERSISTENT) {
+        // Only permanent functions need to be authorized.
+        // Built-in function access is allowed to all users.
+        // If a user can create a temp function, they should be able to use it
+        // without additional authorization.
+        continue;
+      }
+      functionEntities.add(createReadEntity(permFunction.getKey(), permFunction.getValue()));
+    }
+    return functionEntities;
+  }
+
+  private static ReadEntity createReadEntity(String functionName, FunctionInfo functionInfo)
+      throws HiveException {
+    String[] qualFunctionName = FunctionUtils.getQualifiedFunctionNameParts(functionName);
+    // this is only for the purpose of authorization; only the name matters.
+    Database db = new Database(qualFunctionName[0], "", "", null);
+    return new ReadEntity(db, qualFunctionName[1], functionInfo.getClassName(), Type.FUNCTION);
+  }
+
   private static void getTablePartitionUsedColumns(HiveOperation op, BaseSemanticAnalyzer sem,
       Map<Table, List<String>> tab2Cols, Map<Partition, List<String>> part2Cols,
       Map<String, Boolean> tableUsePartLevelAuth) throws HiveException {
@@ -1131,8 +1172,8 @@ public class Driver implements IDriver {
     }
   }
 
-  private static void doAuthorizationV2(SessionState ss, HiveOperation op, Set<ReadEntity> inputs,
-      Set<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
+  private static void doAuthorizationV2(SessionState ss, HiveOperation op, List<ReadEntity> inputs,
+      List<WriteEntity> outputs, String command, Map<String, List<String>> tab2cols,
       Map<String, List<String>> updateTab2Cols) throws HiveException {
 
     /* comment for reviewers -> updateTab2Cols needed to be separate from tab2cols because if I
@@ -1153,7 +1194,7 @@ public class Driver implements IDriver {
   }
 
   private static List<HivePrivilegeObject> getHivePrivObjects(
-      Set<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
+      List<? extends Entity> privObjects, Map<String, List<String>> tableName2Cols) {
     List<HivePrivilegeObject> hivePrivobjs = new ArrayList<HivePrivilegeObject>();
     if(privObjects == null){
       return hivePrivobjs;

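A side note on the "convert to Lists" comment in the hunk above: Sets.union()
(presumably Guava's com.google.common.collect.Sets) returns a live,
unmodifiable view, which is why the function entities cannot be added to the
inputs set directly. A minimal standalone illustration of that behavior:

    import com.google.common.collect.Sets;

    import java.util.ArrayList;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class UnionViewDemo {
      public static void main(String[] args) {
        Set<String> a = new HashSet<>();
        a.add("t1");
        Set<String> b = new HashSet<>();
        b.add("v1");

        Set<String> union = Sets.union(a, b); // unmodifiable live view
        try {
          union.add("fn1"); // the view rejects mutation
        } catch (UnsupportedOperationException expected) {
          System.out.println("union view is immutable");
        }

        // Copying into a List, as the Driver hunk does, makes it growable.
        List<String> inputList = new ArrayList<>(union);
        inputList.add("fn1");
        System.out.println(inputList);
      }
    }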
http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
index 19328c2..0900976 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
@@ -318,7 +318,9 @@ public class Registry {
     try {
       functionName = functionName.toLowerCase();
       if (FunctionUtils.isQualifiedFunctionName(functionName)) {
-        return getQualifiedFunctionInfoUnderLock(functionName);
+        FunctionInfo functionInfo = getQualifiedFunctionInfoUnderLock(functionName);
+        addToCurrentFunctions(functionName, functionInfo);
+        return functionInfo;
       }
       // First try without qualifiers - would resolve builtin/temp functions.
       // Otherwise try qualifying with current db name.
@@ -327,17 +329,24 @@ public class Registry {
         throw new SemanticException ("UDF " + functionName + " is not allowed");
       }
       if (functionInfo == null) {
-        String qualifiedName = FunctionUtils.qualifyFunctionName(
+        functionName = FunctionUtils.qualifyFunctionName(
             functionName, SessionState.get().getCurrentDatabase().toLowerCase());
-        functionInfo = getQualifiedFunctionInfoUnderLock(qualifiedName);
+        functionInfo = getQualifiedFunctionInfoUnderLock(functionName);
       }
-    return functionInfo;
+      addToCurrentFunctions(functionName, functionInfo);
+      return functionInfo;
     } finally {
       lock.unlock();
     }
 
   }
 
+  private void addToCurrentFunctions(String functionName, FunctionInfo functionInfo) {
+    if (SessionState.get() != null && functionInfo != null) {
+      SessionState.get().getCurrentFunctionsInUse().put(functionName, functionInfo);
+    }
+  }
+
   public WindowFunctionInfo getWindowFunctionInfo(String functionName) throws SemanticException {
     FunctionInfo info = getFunctionInfo(WINDOW_FUNC_PREFIX + functionName);
     if (info instanceof WindowFunctionInfo) {

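Since currentFunctionsInUse is keyed by the name passed to
addToCurrentFunctions(), the FunctionUtils naming helpers above determine the
keys. A short sketch of their expected behavior, inferred from how this hunk
uses them (verify against FunctionUtils itself):

    // Assumed behavior of the helpers referenced in this hunk:
    FunctionUtils.isQualifiedFunctionName("mydb.myudf"); // true: has a db prefix
    FunctionUtils.isQualifiedFunctionName("myudf");      // false

    // Qualify an unqualified name with the current database:
    String qualified = FunctionUtils.qualifyFunctionName("myudf", "mydb"); // "mydb.myudf"

    // Split a qualified name back into {db, function} parts:
    String[] parts = FunctionUtils.getQualifiedFunctionNameParts("mydb.myudf");
    // parts[0] == "mydb", parts[1] == "myudf"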
http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 7b654c5..2678def 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.hooks;
 
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Set;
@@ -109,6 +108,19 @@ public class ReadEntity extends Entity implements Serializable {
   }
 
   /**
+   * Constructor for objects represented as String. Currently applicable only
+   * for function names.
+   *
+   * @param db
+   * @param objName
+   * @param className
+   * @param type
+   */
+  public ReadEntity(Database db, String objName, String className, Type type) {
+    super(db, objName, className, type);
+  }
+
+  /**
    * Constructor given a partition.
    *
    * @param p

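In this patch the new constructor is exercised by Driver.createReadEntity(); a
usage along those lines, where the UDF class name below is a hypothetical
placeholder:

    // Wrap a permanent UDF as a function-typed ReadEntity so the authorizer
    // sees it as a query input; only the names matter for authorization.
    String[] parts = FunctionUtils.getQualifiedFunctionNameParts("mydb.myudf");
    Database db = new Database(parts[0], "", "", null); // name-only Database
    ReadEntity fnEntity = new ReadEntity(db, parts[1],
        "com.example.MyUdf" /* hypothetical class name */, Type.FUNCTION);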
http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index e408ea4..4b34f8a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -327,6 +327,8 @@ public class Operation2Privilege {
     adminPrivOps.add(HiveOperationType.CREATE_MAPPING);
     adminPrivOps.add(HiveOperationType.ALTER_MAPPING);
     adminPrivOps.add(HiveOperationType.DROP_MAPPING);
+    adminPrivOps.add(HiveOperationType.CREATEFUNCTION);
+    adminPrivOps.add(HiveOperationType.DROPFUNCTION);
 
     // operations require select priv
     op2Priv.put(HiveOperationType.SHOWCOLUMNS, PrivRequirement.newIOPrivRequirement

http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
index 4e456e7..a9fae4f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
@@ -129,13 +129,17 @@ public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator
         // ignore partitions
         continue;
       case COMMAND_PARAMS:
-      case FUNCTION:
-        // operations that have objects of type COMMAND_PARAMS, FUNCTION are authorized
+        // operations that have objects of type COMMAND_PARAMS are authorized
         // solely on the type
         if (privController.isUserAdmin()) {
           availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
         }
         break;
+      case FUNCTION:
+        // create/drop functions are marked as ADMIN operations.
+        // Usage of available functions in a query is not restricted by SQL
+        // standard authorization.
+        continue;
       default:
        availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(metastoreClient, userName,
            hiveObj, privController.getCurrentRoleNames(), privController.isUserAdmin());

http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 0071a9a..27e42b1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hive.metastore.ObjectStore;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.cache.CachedStore;
 import org.apache.hadoop.hive.ql.MapRedStats;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.Registry;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.spark.session.SparkSession;
@@ -275,6 +276,12 @@ public class SessionState {
   private final Registry registry;
 
   /**
+   * Caches the functions in use for a query, captured during query planning
+   * and later used for function usage authorization.
+   */
+  private final Map<String, FunctionInfo> currentFunctionsInUse = new HashMap<>();
+
+  /**
    * CURRENT_TIMESTAMP value for query
    */
   private Timestamp queryCurrentTimestamp;
@@ -1990,6 +1997,11 @@ public class SessionState {
   public void addCleanupItem(Closeable item) {
     cleanupItems.add(item);
   }
+
+  public Map<String, FunctionInfo> getCurrentFunctionsInUse() {
+    return currentFunctionsInUse;
+  }
+
 }
 
 class ResourceMaps {

http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/test/results/clientnegative/authorization_create_func1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/authorization_create_func1.q.out b/ql/src/test/results/clientnegative/authorization_create_func1.q.out
index 6e11f04..b2532eb 100644
--- a/ql/src/test/results/clientnegative/authorization_create_func1.q.out
+++ b/ql/src/test/results/clientnegative/authorization_create_func1.q.out
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default], [ADMIN PRIVILEGE] on Object [type=FUNCTION, name=default.perm_fn]]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [ADMIN PRIVILEGE on INPUT, ADMIN PRIVILEGE on OUTPUT]

http://git-wip-us.apache.org/repos/asf/hive/blob/d2838990/ql/src/test/results/clientnegative/authorization_create_func2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/authorization_create_func2.q.out b/ql/src/test/results/clientnegative/authorization_create_func2.q.out
index 0afd36f..b2532eb 100644
--- a/ql/src/test/results/clientnegative/authorization_create_func2.q.out
+++ b/ql/src/test/results/clientnegative/authorization_create_func2.q.out
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=FUNCTION, name=temp_fn]]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [ADMIN PRIVILEGE on INPUT, ADMIN PRIVILEGE on OUTPUT]
