This is an automated email from the ASF dual-hosted git repository.
mahesh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 0cde39a HIVE-23361 : Optimising privilege synchroniser. (Simhadri G, reviewed by Mahesh Kumar Behera)
0cde39a is described below
commit 0cde39a7ddb4f8049e3b43724d929da25b045112
Author: Simhadri G <[email protected]>
AuthorDate: Thu May 28 12:22:12 2020 +0530
HIVE-23361 : Optimising privilege synchroniser. (Simhadri G, reviewed by Mahesh Kumar Behera)
Signed-off-by: Mahesh Kumar Behera <[email protected]>
---
.../InformationSchemaWithPrivilegeTestBase.java | 2 +-
.../upgrade/hive/hive-schema-4.0.0.hive.sql | 16 +-
.../upgrade/hive/upgrade-3.1.0-to-4.0.0.hive.sql | 195 +++++++++++++++++++++
.../hadoop/hive/ql/exec/FunctionRegistry.java | 1 +
.../authorization/PrivilegeSynchronizer.java | 29 ++-
.../udf/generic/GenericUDFStringToPrivilege.java | 101 +++++++++++
.../hive/ql/udf/generic/TestUDFSplitMapPrivs.java | 132 ++++++++++++++
.../test/queries/clientpositive/split_map_privs.q | 17 ++
.../clientpositive/llap/show_functions.q.out | 2 +
.../clientpositive/llap/split_map_privs.q.out | 66 +++++++
10 files changed, 553 insertions(+), 8 deletions(-)
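For context on the optimisation: instead of writing one HiveObjectPrivilege row per column privilege, PrivilegeSynchronizer now packs all privileges granted on a column into a single space-separated bit string (one bit per privilege, indexed by the privilege's enum ordinal), and the new split_map_privs UDF expands that string back into privilege names when the information-schema views are queried. Below is a minimal, self-contained Java sketch of that round trip; the hard-coded privilege order is an assumption for illustration only, the real code derives it from HiveResourceACLs.Privilege.

import java.util.ArrayList;
import java.util.List;

public class PrivBitsSketch {
  // Assumed privilege order; the synchronizer indexes bits by enum ordinal.
  private static final String[] PRIVS =
      {"SELECT", "UPDATE", "CREATE", "DROP", "ALTER", "INDEX", "LOCK", "READ", "WRITE"};

  // Packing side: what the synchronizer now stores once per column.
  static String encode(boolean[] granted) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < granted.length; i++) {
      if (i > 0) {
        sb.append(' ');
      }
      sb.append(granted[i] ? '1' : '0');
    }
    return sb.toString();
  }

  // Unpacking side: what split_map_privs does at query time.
  static List<String> decode(String bits) {
    List<String> result = new ArrayList<>();
    String[] tokens = bits.split(" ", -1);
    for (int i = 0; i < tokens.length && i < PRIVS.length; i++) {
      if ("1".equals(tokens[i])) {
        result.add(PRIVS[i]);
      }
    }
    return result;
  }

  public static void main(String[] args) {
    String bits = encode(new boolean[] {true, false, false, true, false, false, false, false, false});
    System.out.println(bits);           // 1 0 0 1 0 0 0 0 0
    System.out.println(decode(bits));   // [SELECT, DROP]
  }
}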
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
index 7302e09..cebfdff 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/server/InformationSchemaWithPrivilegeTestBase.java
@@ -286,7 +286,7 @@ public abstract class InformationSchemaWithPrivilegeTestBase {
List<String> args = new ArrayList<String>(baseArgs);
args.add("-f");
-    args.add("../../metastore/scripts/upgrade/hive/hive-schema-3.1.0.hive.sql");
+    args.add("../../metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql");
BeeLine beeLine = new BeeLine();
int result = beeLine.begin(args.toArray(new String[] {}), null);
beeLine.close();
diff --git a/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
index d857410..cc9aeef 100644
--- a/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/hive-schema-4.0.0.hive.sql
@@ -1677,7 +1677,8 @@ WHERE
AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
- AND P.`TBL_COL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer();
+  AND array_contains(split_map_privs(P.`TBL_COL_PRIV`),"SELECT") AND P.`AUTHORIZER`=current_authorizer();
+
CREATE OR REPLACE VIEW `COLUMN_PRIVILEGES`
(
@@ -1700,7 +1701,18 @@ SELECT DISTINCT
P.`TBL_COL_PRIV`,
IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
FROM
- `sys`.`TBL_COL_PRIVS` P JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
+ (SELECT
+ Q.`GRANTOR`,
+ Q.`GRANT_OPTION`,
+ Q.`PRINCIPAL_NAME`,
+ Q.`PRINCIPAL_TYPE`,
+ Q.`AUTHORIZER`,
+ Q.`COLUMN_NAME`,
+ `TBL_COL_PRIV_TMP`.`TBL_COL_PRIV`,
+ Q.`TBL_ID`
+ FROM `sys`.`TBL_COL_PRIVS` AS Q
+    LATERAL VIEW explode(split_map_privs(Q.`TBL_COL_PRIV`)) `TBL_COL_PRIV_TMP` AS `TBL_COL_PRIV`) P
+ JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
JOIN `sys`.`SDS` S ON (S.`SD_ID` = T.`SD_ID`)
  LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
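The LATERAL VIEW explode(split_map_privs(...)) rewrite above fans each packed TBL_COL_PRIVS row back out into one row per granted privilege, so COLUMN_PRIVILEGES keeps its original one-privilege-per-row shape. A rough Java analogue of that expansion (the principal and column values are hypothetical, purely for illustration):

import java.util.ArrayList;
import java.util.List;

public class ExplodeSketch {
  public static void main(String[] args) {
    // Privileges decoded from one packed row, e.g. split_map_privs('1 0 0 1 0 0 0 0 0').
    List<String> decoded = List.of("SELECT", "DROP");
    List<String> exploded = new ArrayList<>();
    for (String priv : decoded) {
      // One output row per privilege, carrying the row's other columns along.
      exploded.add("hive_test_user\tkey\t" + priv);
    }
    exploded.forEach(System.out::println);
  }
}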
diff --git a/metastore/scripts/upgrade/hive/upgrade-3.1.0-to-4.0.0.hive.sql b/metastore/scripts/upgrade/hive/upgrade-3.1.0-to-4.0.0.hive.sql
index 0523e25..09c95d7 100644
--- a/metastore/scripts/upgrade/hive/upgrade-3.1.0-to-4.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/upgrade-3.1.0-to-4.0.0.hive.sql
@@ -497,6 +497,201 @@ CREATE OR REPLACE VIEW `VERSION` AS SELECT 1 AS `VER_ID`, '4.0.0' AS `SCHEMA_VER
USE INFORMATION_SCHEMA;
+
+CREATE OR REPLACE VIEW `COLUMNS`
+(
+ `TABLE_CATALOG`,
+ `TABLE_SCHEMA`,
+ `TABLE_NAME`,
+ `COLUMN_NAME`,
+ `ORDINAL_POSITION`,
+ `COLUMN_DEFAULT`,
+ `IS_NULLABLE`,
+ `DATA_TYPE`,
+ `CHARACTER_MAXIMUM_LENGTH`,
+ `CHARACTER_OCTET_LENGTH`,
+ `NUMERIC_PRECISION`,
+ `NUMERIC_PRECISION_RADIX`,
+ `NUMERIC_SCALE`,
+ `DATETIME_PRECISION`,
+ `INTERVAL_TYPE`,
+ `INTERVAL_PRECISION`,
+ `CHARACTER_SET_CATALOG`,
+ `CHARACTER_SET_SCHEMA`,
+ `CHARACTER_SET_NAME`,
+ `COLLATION_CATALOG`,
+ `COLLATION_SCHEMA`,
+ `COLLATION_NAME`,
+ `UDT_CATALOG`,
+ `UDT_SCHEMA`,
+ `UDT_NAME`,
+ `SCOPE_CATALOG`,
+ `SCOPE_SCHEMA`,
+ `SCOPE_NAME`,
+ `MAXIMUM_CARDINALITY`,
+ `DTD_IDENTIFIER`,
+ `IS_SELF_REFERENCING`,
+ `IS_IDENTITY`,
+ `IDENTITY_GENERATION`,
+ `IDENTITY_START`,
+ `IDENTITY_INCREMENT`,
+ `IDENTITY_MAXIMUM`,
+ `IDENTITY_MINIMUM`,
+ `IDENTITY_CYCLE`,
+ `IS_GENERATED`,
+ `GENERATION_EXPRESSION`,
+ `IS_SYSTEM_TIME_PERIOD_START`,
+ `IS_SYSTEM_TIME_PERIOD_END`,
+ `SYSTEM_TIME_PERIOD_TIMESTAMP_GENERATION`,
+ `IS_UPDATABLE`,
+ `DECLARED_DATA_TYPE`,
+ `DECLARED_NUMERIC_PRECISION`,
+ `DECLARED_NUMERIC_SCALE`
+) AS
+SELECT DISTINCT
+ 'default',
+ D.NAME,
+ T.TBL_NAME,
+ C.COLUMN_NAME,
+ C.INTEGER_IDX,
+ cast (null as string),
+ 'YES',
+ C.TYPE_NAME as TYPE_NAME,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+ ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'varchar%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^VARCHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+       WHEN lower(C.TYPE_NAME) like 'char%' THEN cast(regexp_extract(upper(C.TYPE_NAME), '^CHAR\\s*\\((\\d+)\\s*\\)$', 1) as int)
+ ELSE null END,
+ CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+ WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+ WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+ WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+ WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+ ELSE null END,
+ CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+ WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+ WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+ WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+ ELSE null END,
+  CASE WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+),(\\d+)',2)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+),(\\d+)',2)
+ ELSE null END,
+ CASE WHEN lower(C.TYPE_NAME) = 'date' THEN 0
+ WHEN lower(C.TYPE_NAME) = 'timestamp' THEN 9
+ ELSE null END,
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ C.CD_ID,
+ 'NO',
+ 'NO',
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ cast (null as string),
+ 'NEVER',
+ cast (null as string),
+ 'NO',
+ 'NO',
+ cast (null as string),
+ 'YES',
+ C.TYPE_NAME as DECLARED_DATA_TYPE,
+ CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 19
+ WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'smallint' THEN 5
+ WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 3
+ WHEN lower(C.TYPE_NAME) = 'float' THEN 23
+ WHEN lower(C.TYPE_NAME) = 'double' THEN 53
+       WHEN lower(C.TYPE_NAME) like 'decimal%' THEN regexp_extract(upper(C.TYPE_NAME), '^DECIMAL\\s*\\((\\d+)',1)
+       WHEN lower(C.TYPE_NAME) like 'numeric%' THEN regexp_extract(upper(C.TYPE_NAME), '^NUMERIC\\s*\\((\\d+)',1)
+ ELSE null END,
+ CASE WHEN lower(C.TYPE_NAME) = 'bigint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'int' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'smallint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'tinyint' THEN 10
+ WHEN lower(C.TYPE_NAME) = 'float' THEN 2
+ WHEN lower(C.TYPE_NAME) = 'double' THEN 2
+ WHEN lower(C.TYPE_NAME) like 'decimal%' THEN 10
+ WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
+ ELSE null END
+FROM
+ `sys`.`COLUMNS_V2` C JOIN `sys`.`SDS` S ON (C.`CD_ID` = S.`CD_ID`)
+ JOIN `sys`.`TBLS` T ON (S.`SD_ID` = T.`SD_ID`)
+ JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
+  LEFT JOIN `sys`.`TBL_COL_PRIVS` P ON (T.`TBL_ID` = P.`TBL_ID`)
+WHERE
+ NOT restrict_information_schema() OR P.`TBL_ID` IS NOT NULL
+ AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
+ AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND array_contains(split_map_privs(P.`TBL_COL_PRIV`),"SELECT") AND P.`AUTHORIZER`=current_authorizer();
+
+
+CREATE OR REPLACE VIEW `COLUMN_PRIVILEGES`
+(
+ `GRANTOR`,
+ `GRANTEE`,
+ `TABLE_CATALOG`,
+ `TABLE_SCHEMA`,
+ `TABLE_NAME`,
+ `COLUMN_NAME`,
+ `PRIVILEGE_TYPE`,
+ `IS_GRANTABLE`
+) AS
+SELECT DISTINCT
+ P.`GRANTOR`,
+ P.`PRINCIPAL_NAME`,
+ 'default',
+ D.`NAME`,
+ T.`TBL_NAME`,
+ P.`COLUMN_NAME`,
+ P.`TBL_COL_PRIV`,
+ IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
+FROM
+ (SELECT
+ Q.`GRANTOR`,
+ Q.`GRANT_OPTION`,
+ Q.`PRINCIPAL_NAME`,
+ Q.`PRINCIPAL_TYPE`,
+ Q.`AUTHORIZER`,
+ Q.`COLUMN_NAME`,
+ `TBL_COL_PRIV_TMP`.`TBL_COL_PRIV`,
+ Q.`TBL_ID`
+ FROM `sys`.`TBL_COL_PRIVS` AS Q
+    LATERAL VIEW explode(split_map_privs(Q.`TBL_COL_PRIV`)) `TBL_COL_PRIV_TMP` AS `TBL_COL_PRIV`) P
+ JOIN `sys`.`TBLS` T ON (P.`TBL_ID` = T.`TBL_ID`)
+ JOIN `sys`.`DBS` D ON (T.`DB_ID` = D.`DB_ID`)
+ JOIN `sys`.`SDS` S ON (S.`SD_ID` = T.`SD_ID`)
+  LEFT JOIN `sys`.`TBL_PRIVS` P2 ON (P.`TBL_ID` = P2.`TBL_ID`)
+WHERE
+ NOT restrict_information_schema() OR P2.`TBL_ID` IS NOT NULL
+  AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+ AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT' AND P.`AUTHORIZER`=current_authorizer() AND P2.`AUTHORIZER`=current_authorizer();
+
create or replace view SCHEDULED_QUERIES as
select
`SCHEDULED_QUERY_ID` ,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index 76e460e..1a6fc4c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -285,6 +285,7 @@ public final class FunctionRegistry {
system.registerGenericUDF("quote", GenericUDFQuote.class);
system.registerGenericUDF("nvl", GenericUDFCoalesce.class); //HIVE-20961
system.registerGenericUDF("split", GenericUDFSplit.class);
+ system.registerGenericUDF("split_map_privs",
GenericUDFStringToPrivilege.class);
system.registerGenericUDF("str_to_map", GenericUDFStringToMap.class);
system.registerGenericUDF("translate", GenericUDFTranslate.class);
system.registerGenericUDF("validate_acid_sort_order",
GenericUDFValidateAcidSortOrder.class);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
index c7a4843..0ec61ca 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java
@@ -17,9 +17,12 @@
*/
package org.apache.hadoop.hive.ql.security.authorization;
+import java.util.Arrays;
import java.util.Map;
import java.util.concurrent.TimeUnit;
+import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -43,6 +46,7 @@ import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
/**
* PrivilegeSynchronizer defines a thread to synchronize privileges from
* external authorizer to Hive metastore.
@@ -57,7 +61,8 @@ public class PrivilegeSynchronizer implements Runnable {
private PolicyProviderContainer policyProviderContainer;
public PrivilegeSynchronizer(LeaderLatch privilegeSynchronizerLatch,
- PolicyProviderContainer policyProviderContainer, HiveConf hiveConf) {
+ PolicyProviderContainer policyProviderContainer,
+ HiveConf hiveConf) {
this.hiveConf = new HiveConf(hiveConf);
    this.hiveConf.set(MetastoreConf.ConfVars.FILTER_HOOK.getVarname(), DefaultMetaStoreFilterHookImpl.class.getName());
try {
@@ -78,6 +83,9 @@ public class PrivilegeSynchronizer implements Runnable {
for (Map.Entry<String, Map<HiveResourceACLs.Privilege,
HiveResourceACLs.AccessResult>> principalAcls
: principalAclsMap.entrySet()) {
String principal = principalAcls.getKey();
+ int[] columnPrivilegeBits = new int[] {0, 0, 0, 0, 0, 0, 0, 0, 0};
+ boolean columnUpdateFlag = false;
+
for (Map.Entry<HiveResourceACLs.Privilege,
HiveResourceACLs.AccessResult> acl : principalAcls.getValue()
.entrySet()) {
if (acl.getValue() == HiveResourceACLs.AccessResult.ALLOWED) {
@@ -95,16 +103,27 @@ public class PrivilegeSynchronizer implements Runnable {
              (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
break;
case COLUMN:
- privBag.addToPrivileges(
-              new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
-                  principal, principalType, new PrivilegeGrantInfo(acl.getKey().toString(),
-                  (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
+
+ int privilegeBit = acl.getKey().ordinal();
+ columnPrivilegeBits[privilegeBit] = 1;
+ columnUpdateFlag = true;
+
break;
default:
            throw new RuntimeException("Get unknown object type " + objectType);
}
}
}
+ if (columnUpdateFlag) {
+        String columnPrivilegeBitsString = StringUtils.join(Arrays.asList(ArrayUtils.toObject(columnPrivilegeBits)), " ");
+ privBag.addToPrivileges(
+            new HiveObjectPrivilege(new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
+                principal, principalType, new PrivilegeGrantInfo(columnPrivilegeBitsString,
+                (int) (System.currentTimeMillis() / 1000), GRANTOR, PrincipalType.USER, false), authorizer));
+
+ columnUpdateFlag = false;
+ }
}
}
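The bit-string formatting above leans on commons-lang3: ArrayUtils.toObject boxes the int[] into an Integer[], and StringUtils.join renders it with single-space separators. A small standalone sketch of what columnPrivilegeBitsString ends up holding (the bit values here are made up for illustration):

import java.util.Arrays;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;

public class JoinBitsSketch {
  public static void main(String[] args) {
    int[] columnPrivilegeBits = {1, 0, 0, 1, 0, 0, 0, 0, 0};
    // Boxes to Integer[], then joins with a single space: "1 0 0 1 0 0 0 0 0".
    String s = StringUtils.join(Arrays.asList(ArrayUtils.toObject(columnPrivilegeBits)), " ");
    System.out.println(s);
  }
}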
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java
new file mode 100644
index 0000000..87ce8d5
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFStringToPrivilege.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * GenericUDFStringToPrivilege.
+ */
+
+@Description(name = "split_map_privs", value = "_FUNC_(str) - Splits binary str and maps to privilege type "
+    + "regex", extended = "Example:\n" + "  > SELECT _FUNC_('0 1 1 0 1 1 0 0 0') FROM src LIMIT 1;\n"
+    + "  [\"UPDATE\", \"CREATE\", \"ALTER\", \"INDEX\"]")
+public class GenericUDFStringToPrivilege extends GenericUDF {
+  private transient ObjectInspectorConverters.Converter[] converters = new ObjectInspectorConverters.Converter[1];
+
+ private List<HiveResourceACLs.Privilege> privilegesList =
Arrays.asList(HiveResourceACLs.Privilege.values());
+
+ @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
+ checkArgsSize(arguments, 1, 1);
+ checkArgPrimitive(arguments, 0);
+
+    converters[0] = ObjectInspectorConverters.getConverter(arguments[0],
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+
+    return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+ }
+
+  @Override public Object evaluate(DeferredObject[] arguments) throws HiveException {
+ assert (arguments.length == 1);
+
+ if (arguments[0].get() == null) {
+ return null;
+ }
+
+ Text s = (Text) converters[0].convert(arguments[0].get());
+ ArrayList<Text> result = new ArrayList<Text>();
+ int index = 0;
+ //Map<Integer, String> privs = privsMap.getPrivilegeMap();
+
+ for (String str : s.toString().split(" ", -1)) {
+ if ("1".equals(str)) {
+ result.add(new Text(String.valueOf(privilegesList.get(index))));
+ }
+ index++;
+ }
+
+ return result;
+ }
+
+ @Override protected String getFuncName() {
+ return "split_map_privs";
+ }
+
+ @Override public String getDisplayString(String[] children) {
+ assert (children.length == 1);
+ return getStandardDisplayString("split_map_privs", children);
+ }
+
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java
new file mode 100644
index 0000000..03df2a5
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestUDFSplitMapPrivs.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import junit.framework.TestCase;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+import org.junit.Test;
+import java.util.ArrayList;
+import java.util.List;
+import static java.util.Arrays.asList;
+import static org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
+import static org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+
+/**
+ * Tests for the UDF GenericUDFStringToPrivilege.
+ */
+public class TestUDFSplitMapPrivs extends TestCase {
+  private final GenericUDFStringToPrivilege udf = new GenericUDFStringToPrivilege();
+ private final Object p0 = new Text("SELECT");
+ private final Object p1 = new Text("UPDATE");
+ private final Object p2 = new Text("CREATE");
+ private final Object p3 = new Text("DROP");
+ private final Object p4 = new Text("ALTER");
+ private final Object p5 = new Text("INDEX");
+ private final Object p6 = new Text("LOCK");
+ private final Object p7 = new Text("READ");
+ private final Object p8 = new Text("WRITE");
+ private final Object p9 = new Text("All");
+
+
+ @Test public void testBinaryStringSplitMapToPrivs() throws HiveException {
+
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+ ObjectInspector[] initArgs = {valueOI0};
+
+ udf.initialize(initArgs);
+
+ DeferredObject args;
+ DeferredObject[] evalArgs;
+
+ args = new DeferredJavaObject(new Text("1 0 0 0 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 0 0 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 0 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 1 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2, p3), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 1 1 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2, p3, p4), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 1 1 1 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2, p3, p4, p5), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 1 1 1 1 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2, p3, p4, p5, p6), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 1 1 1 1 1 1 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p1, p2, p3, p4, p5, p6, p7), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 0 1 1 1 1 1 1 1 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerify(asList(p0, p2, p3, p4, p5, p6, p7, p8), evalArgs);
+
+ }
+
+  @Test public void binaryStringMappingShouldFail() throws HiveException {
+
+    ObjectInspector valueOI0 = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+ ObjectInspector[] initArgs = {valueOI0};
+
+ udf.initialize(initArgs);
+ DeferredObject args;
+ DeferredObject[] evalArgs;
+
+ args = new DeferredJavaObject(new Text("1 0 0 0 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerifyNotTrue(asList(p1), evalArgs);
+
+ args = new DeferredJavaObject(new Text("1 1 0 0 0 0 0 0 0 0"));
+ evalArgs = new DeferredObject[] {args};
+ runAndVerifyNotTrue(asList(p0, p5), evalArgs);
+
+ }
+
+ private void runAndVerify(List<Object> expResult, DeferredObject[] evalArgs)
throws HiveException {
+
+ ArrayList output = (ArrayList) udf.evaluate(evalArgs);
+ assertEquals(expResult, output);
+ }
+
+ private void runAndVerifyNotTrue(List<Object> expResult, DeferredObject[]
evalArgs) throws HiveException {
+
+ ArrayList output = (ArrayList) udf.evaluate(evalArgs);
+ assertNotSame(expResult, output);
+ }
+
+}
diff --git a/ql/src/test/queries/clientpositive/split_map_privs.q b/ql/src/test/queries/clientpositive/split_map_privs.q
new file mode 100644
index 0000000..afaefbe
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/split_map_privs.q
@@ -0,0 +1,17 @@
+--! qt:dataset:src
+set hive.fetch.task.conversion=more;
+
+use default;
+DESCRIBE FUNCTION split_map_privs;
+DESCRIBE FUNCTION EXTENDED split_map_privs;
+
+EXPLAIN SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows);
+
+
+SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows);
diff --git a/ql/src/test/results/clientpositive/llap/show_functions.q.out b/ql/src/test/results/clientpositive/llap/show_functions.q.out
index 4b38cfb..36c868d 100644
--- a/ql/src/test/results/clientpositive/llap/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/llap/show_functions.q.out
@@ -332,6 +332,7 @@ sort_array_by
soundex
space
split
+split_map_privs
sq_count_check
sqrt
stack
@@ -840,6 +841,7 @@ sort_array_by
soundex
space
split
+split_map_privs
sq_count_check
sqrt
stack
diff --git a/ql/src/test/results/clientpositive/llap/split_map_privs.q.out b/ql/src/test/results/clientpositive/llap/split_map_privs.q.out
new file mode 100644
index 0000000..1260a4e
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/split_map_privs.q.out
@@ -0,0 +1,66 @@
+PREHOOK: query: use default
+PREHOOK: type: SWITCHDATABASE
+PREHOOK: Input: database:default
+POSTHOOK: query: use default
+POSTHOOK: type: SWITCHDATABASE
+POSTHOOK: Input: database:default
+PREHOOK: query: DESCRIBE FUNCTION split_map_privs
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION split_map_privs
+POSTHOOK: type: DESCFUNCTION
+split_map_privs(str) - Splits binary str and maps to privilege type regex
+PREHOOK: query: DESCRIBE FUNCTION EXTENDED split_map_privs
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESCRIBE FUNCTION EXTENDED split_map_privs
+POSTHOOK: type: DESCFUNCTION
+split_map_privs(str) - Splits binary str and maps to privilege type regex
+Example:
+ > SELECT split_map_privs('0 1 1 0 1 1 0 0 0') FROM src LIMIT 1;
+ ["UPDATE", "CREATE", "ALTER", "INDEX"]
+Function class:org.apache.hadoop.hive.ql.udf.generic.GenericUDFStringToPrivilege
+Function type:BUILTIN
+PREHOOK: query: EXPLAIN SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: EXPLAIN SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: src
+ Row Limit Per Split: 1
+ Select Operator
+            expressions: split_map_privs('1 0 0 0 0 0 0 0 0 0') (type: array<string>), split_map_privs('1 0 0 1 0 0 0 0 0 0') (type: array<string>)
+ outputColumnNames: _col0, _col1
+ ListSink
+
+PREHOOK: query: SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT
+ split_map_privs('1 0 0 0 0 0 0 0 0 0'),
+ split_map_privs('1 0 0 1 0 0 0 0 0 0')
+FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+["SELECT"] ["SELECT","DROP"]