This is an automated email from the ASF dual-hosted git repository.
dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 62b1c7bf3b9 HIVE-27116: HS2 need to send owner info for UDFs in the HivePrivilegeObject for authorization (Sai Hemanth Gantasala, reviewed by Zhihua Deng)
62b1c7bf3b9 is described below
commit 62b1c7bf3b91a296e466774a6cb3ec31151e4067
Author: Sai Hemanth Gantasala <[email protected]>
AuthorDate: Fri Mar 10 14:56:49 2023 +0530
HIVE-27116: HS2 need to send owner info for UDFs in the HivePrivilegeObject for authorization (Sai Hemanth Gantasala, reviewed by Zhihua Deng)
Closes #4092
---
.../hadoop/hive/ql/TestCreateUdfEntities.java | 4 +--
.../ql/ddl/function/AbstractFunctionAnalyzer.java | 24 +++++++++++++--
.../org/apache/hadoop/hive/ql/hooks/Entity.java | 36 +++++++++++++++++++---
.../apache/hadoop/hive/ql/hooks/ReadEntity.java | 1 +
.../apache/hadoop/hive/ql/hooks/WriteEntity.java | 6 ++++
.../authorization/command/CommandAuthorizerV2.java | 13 ++++++--
.../llap/authorization_functions_in_views.q.out | 4 +--
7 files changed, 74 insertions(+), 14 deletions(-)
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
index 325831e2eb9..0d7503668bc 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestCreateUdfEntities.java
@@ -59,7 +59,7 @@ public class TestCreateUdfEntities {
assertEquals("default", outputEntities[0].getDatabase().getName());
assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
- assertEquals(funcName, outputEntities[1].getFunctionName());
+ assertEquals(funcName, outputEntities[1].getFunction().getFunctionName());
assertEquals(Entity.Type.LOCAL_DIR, outputEntities[2].getType());
assertEquals("file:///tmp/udf1.jar",
outputEntities[2].getLocation().toString());
@@ -77,7 +77,7 @@ public class TestCreateUdfEntities {
assertEquals("default", outputEntities[0].getDatabase().getName());
assertEquals(Entity.Type.FUNCTION, outputEntities[1].getType());
- assertEquals(funcName, outputEntities[1].getFunctionName());
+ assertEquals(funcName, outputEntities[1].getFunction().getFunctionName());
assertEquals(Entity.Type.DFS_DIR, outputEntities[2].getType());
assertEquals("hdfs:///tmp/udf1.jar",
outputEntities[2].getLocation().toString());
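The test change above reflects the new entity API: the FUNCTION write entity now carries the metastore Function object, so the name is read through getFunction() rather than the old string accessor. A minimal fragment illustrating the lookup path, reusing the test's own funcName and outputEntities (the db name is assumed to be the test's default database):

import org.apache.hadoop.hive.metastore.api.Function;

// Permanent UDFs now expose the full metastore object on the entity.
Function fn = outputEntities[1].getFunction();
assertEquals(funcName, fn.getFunctionName());
assertEquals("default", fn.getDbName());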
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java
index 997cb97d950..2ca87cca193 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/AbstractFunctionAnalyzer.java
@@ -21,6 +21,9 @@ package org.apache.hadoop.hive.ql.ddl.function;
import java.util.List;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -29,6 +32,8 @@ import org.apache.hadoop.hive.ql.hooks.Entity.Type;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.HiveOperation;
+import org.apache.hadoop.hive.ql.session.SessionState;
/**
* Abstract ancestor of function related ddl analyzer classes.
@@ -66,11 +71,24 @@ public abstract class AbstractFunctionAnalyzer extends BaseSemanticAnalyzer {
}
if (database != null) {
outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
+ // Add the permanent function as a WriteEntity
+ Function function;
+ if (queryState.getHiveOperation().equals(HiveOperation.CREATEFUNCTION)) {
+ function = new Function(functionName, database.getName(), className,
+ SessionState.getUserFromAuthenticator(), PrincipalType.USER,
+ (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA, resources);
+ } else {
+ try {
+ function = db.getFunction(database.getName(), functionName);
+ } catch (HiveException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ outputs.add(new WriteEntity(function, WriteEntity.WriteType.DDL_NO_LOCK));
+ } else { // Temporary functions
+ outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
}
- // Add the function name as a WriteEntity
- outputs.add(new WriteEntity(database, functionName, className, Type.FUNCTION, WriteEntity.WriteType.DDL_NO_LOCK));
-
if (resources != null) {
for (ResourceUri resource : resources) {
String uriPath = resource.getUri();
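For CREATE FUNCTION the analyzer now builds the metastore Function eagerly, stamping the current session user as owner; for other operations such as DROP FUNCTION it fetches the stored Function so the recorded owner is preserved. A sketch of the object being constructed, with illustrative values (the real owner comes from SessionState.getUserFromAuthenticator(), and the class name here is hypothetical):

import java.util.Collections;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;

Function fn = new Function(
    "udf_upper",                                // function name
    "test",                                     // database name
    "org.example.udf.Upper",                    // implementing class (hypothetical)
    "hive_user",                                // owner name, from the authenticator in HS2
    PrincipalType.USER,                         // owner type
    (int) (System.currentTimeMillis() / 1000),  // create time in seconds
    FunctionType.JAVA,                          // function type
    Collections.emptyList());                   // resource URIs (none in this sketch)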
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 254605e1db5..72714e6297f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -25,6 +25,7 @@ import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
@@ -70,6 +71,11 @@ public class Entity implements Serializable {
*/
private Partition p;
+ /**
+ * The metastore API function. This is null if this object is not a function.
+ */
+ private Function f;
+
/**
* The directory if this is a directory
*/
@@ -302,7 +308,7 @@ public class Entity implements Serializable {
public Entity(Database database, String strObj, String className, Type type) {
if (type != Type.FUNCTION) {
throw new IllegalArgumentException("This constructor is supported only for type:"
- + Type.FUNCTION);
+ + Type.FUNCTION);
}
this.database = database;
this.stringObject = strObj;
@@ -312,6 +318,21 @@ public class Entity implements Serializable {
name = computeName();
}
+ /**
+ * Constructor for a function.
+ *
+ * @param f
+ * Function that is read or written to.
+ */
+ public Entity(Function f, boolean complete) {
+ d = null;
+ p = null;
+ this.f = f;
+ typ = Type.FUNCTION;
+ name = computeName();
+ this.complete = complete;
+ }
+
/**
* Get the parameter map of the Entity.
*/
@@ -374,6 +395,13 @@ public class Entity implements Serializable {
return t;
}
+ /**
+ * Get the function associated with the entity.
+ */
+ public Function getFunction() {
+ return f;
+ }
+
public boolean isDummy() {
if (typ == Type.DATABASE) {
return database.getName().equals(SemanticAnalyzer.DUMMY_DATABASE);
@@ -407,10 +435,10 @@ public class Entity implements Serializable {
case DUMMYPARTITION:
return p.getName();
case FUNCTION:
- if (database != null) {
- return database.getName() + "." + stringObject;
+ if (f != null) {
+ return f.getDbName() + "." + f.getFunctionName();
}
- return stringObject;
+ return database != null ? database.getName() + "." + stringObject : stringObject;
case SERVICE_NAME:
return stringObject;
case DATACONNECTOR:
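computeName() for FUNCTION entities now prefers the metastore Function when present and keeps the database-qualified string form as the fallback for temporary functions. A fragment showing both paths, assuming fn from the sketch above and db as a Database handle for the same test database:

// Permanent UDF: name derived from the metastore object.
Entity permanent = new Entity(fn, true);
// permanent.getName() -> "test.udf_upper"

// Temporary UDF: legacy string-based constructor, fallback naming.
Entity temporary = new Entity(db, "my_temp_udf", "org.example.udf.Temp", Entity.Type.FUNCTION);
// temporary.getName() -> "test.my_temp_udf"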
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index df4fe3c8106..68c139fd471 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -27,6 +27,7 @@ import java.util.Set;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.DataConnector;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index be64185f7a9..fec4fe7be69 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -22,6 +22,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.DataConnector;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
@@ -90,6 +91,11 @@ public class WriteEntity extends Entity implements Serializable {
setWriteTypeInternal(type);
}
+ public WriteEntity(Function function, WriteType type) {
+ super(function, true);
+ setWriteTypeInternal(type);
+ }
+
/**
* Constructor for objects represented as String.
* Currently applicable only for function names.
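The new WriteEntity constructor simply threads the Function through to the Entity constructor above, so the owner travels with the entity and the authorizer needs no extra metastore round trip. A one-line usage sketch, again assuming fn from the earlier fragment:

import org.apache.hadoop.hive.ql.hooks.WriteEntity;

// DDL_NO_LOCK matches what AbstractFunctionAnalyzer registers for function DDL.
WriteEntity output = new WriteEntity(fn, WriteEntity.WriteType.DDL_NO_LOCK);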
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
index 27fbe94f38d..c21dca345ef 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/command/CommandAuthorizerV2.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.DataConnector;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.ql.exec.FunctionInfo;
import org.apache.hadoop.hive.ql.exec.FunctionUtils;
import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionType;
@@ -219,9 +220,15 @@ final class CommandAuthorizerV2 {
actionType, null, null, null, null);
break;
case FUNCTION:
- String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
- hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(),
- null, null, actionType, null, privObject.getClassName(), null, null);
+ if (privObject.getFunction() != null) {
+ Function function = privObject.getFunction();
+ hivePrivObject = new HivePrivilegeObject(privObjType, function.getDbName(), function.getFunctionName(),
+ null, null, actionType, null, function.getClassName(), function.getOwnerName(), function.getOwnerType());
+ } else {
+ String dbName = privObject.getDatabase() != null ? privObject.getDatabase().getName() : null;
+ hivePrivObject = new HivePrivilegeObject(privObjType, dbName, privObject.getFunctionName(),
+ null, null, actionType, null, privObject.getClassName(), null, null);
+ }
break;
case DUMMYPARTITION:
case PARTITION:
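The net effect for authorization plugins: FUNCTION privilege objects for permanent UDFs now arrive with owner name and owner type populated, while temporary functions still pass nulls. A sketch of a plugin reading the new fields (getter names assumed from HivePrivilegeObject's existing accessors):

import java.util.List;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;

void logUdfOwners(List<HivePrivilegeObject> outputObjs) {
  for (HivePrivilegeObject obj : outputObjs) {
    if (obj.getType() == HivePrivilegeObjectType.FUNCTION) {
      // Owner fields are populated only for permanent UDFs after this change.
      System.out.println(obj.getDbname() + "." + obj.getObjectName()
          + " owner=" + obj.getOwnerName() + " ownerType=" + obj.getOwnerType());
    }
  }
}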
diff --git a/ql/src/test/results/clientpositive/llap/authorization_functions_in_views.q.out b/ql/src/test/results/clientpositive/llap/authorization_functions_in_views.q.out
index 4094871cab2..7da69d63a28 100644
--- a/ql/src/test/results/clientpositive/llap/authorization_functions_in_views.q.out
+++ b/ql/src/test/results/clientpositive/llap/authorization_functions_in_views.q.out
@@ -134,11 +134,11 @@ POSTHOOK: Input: default@view_using_udf
PREHOOK: query: drop function test.UDF_Upper
PREHOOK: type: DROPFUNCTION
PREHOOK: Output: database:test
-PREHOOK: Output: test.UDF_Upper
+PREHOOK: Output: test.udf_upper
POSTHOOK: query: drop function test.UDF_Upper
POSTHOOK: type: DROPFUNCTION
POSTHOOK: Output: database:test
-POSTHOOK: Output: test.UDF_Upper
+POSTHOOK: Output: test.udf_upper
PREHOOK: query: drop database test
PREHOOK: type: DROPDATABASE
PREHOOK: Input: database:test