Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Fri Jul 25 00:38:23 2014
@@ -89,8 +89,7 @@ public class Warehouse {
     try {
       Class<? extends MetaStoreFS> handlerClass = (Class<? extends MetaStoreFS>) Class
           .forName(handlerClassStr, true, JavaUtils.getClassLoader());
-      MetaStoreFS handler = (MetaStoreFS) ReflectionUtils.newInstance(
-          handlerClass, conf);
+      MetaStoreFS handler = ReflectionUtils.newInstance(handlerClass, conf);
       return handler;
     } catch (ClassNotFoundException e) {
       throw new MetaException("Error in loading MetaStoreFS handler."
@@ -563,4 +562,12 @@ public class Warehouse {
     return values;
   }

+  public static Map<String, String> makeSpecFromValues(List<FieldSchema> partCols,
+      List<String> values) {
+    Map<String, String> spec = new LinkedHashMap<String, String>();
+    for (int i = 0; i < values.size(); i++) {
+      spec.put(partCols.get(i).getName(), values.get(i));
+    }
+    return spec;
+  }
 }
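Note on the new helper: makeSpecFromValues pairs partition columns with partition values positionally, and the LinkedHashMap preserves column order. A minimal usage sketch (the column names and values below are hypothetical, not from this commit):

    // Hypothetical inputs: two partition columns and their matching values.
    List<FieldSchema> partCols = Arrays.asList(
        new FieldSchema("ds", "string", null),
        new FieldSchema("hr", "string", null));
    List<String> values = Arrays.asList("2014-07-25", "00");

    // Yields an ordered spec: {ds=2014-07-25, hr=00}
    Map<String, String> spec = Warehouse.makeSpecFromValues(partCols, values);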
Modified: hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java (original)
+++ hive/branches/cbo/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java Fri Jul 25 00:38:23 2014
@@ -727,7 +727,7 @@ public class TxnHandler {
   }

   public ShowCompactResponse showCompact(ShowCompactRequest rqst) throws MetaException {
-    ShowCompactResponse response = new ShowCompactResponse();
+    ShowCompactResponse response = new ShowCompactResponse(new ArrayList<ShowCompactResponseElement>());
     Connection dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
     Statement stmt = null;
     try {

Modified: hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Fri Jul 25 00:38:23 2014
@@ -437,9 +437,9 @@ public class DummyRawStoreControlledComm
   }

   @Override
-  public boolean revokePrivileges(PrivilegeBag privileges) throws InvalidObjectException,
-      MetaException, NoSuchObjectException {
-    return objectStore.revokePrivileges(privileges);
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
+      throws InvalidObjectException, MetaException, NoSuchObjectException {
+    return objectStore.revokePrivileges(privileges, grantOption);
   }

   @Override

Modified: hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Fri Jul 25 00:38:23 2014
@@ -462,8 +462,8 @@ public class DummyRawStoreForJdoConnecti
   }

   @Override
-  public boolean revokePrivileges(PrivilegeBag privileges) throws InvalidObjectException,
-      MetaException, NoSuchObjectException {
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
+      throws InvalidObjectException, MetaException, NoSuchObjectException {
     return false;
   }
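Note on the revokePrivileges signature change threaded through these stores: the new grantOption flag appears intended to distinguish revoking a privilege outright from revoking only its WITH GRANT OPTION bit (SQL's "REVOKE GRANT OPTION FOR ..."). A caller-side sketch, assuming that interpretation of the flag:

    // Hypothetical RawStore call; privBag describes e.g. SELECT on some table.
    // grantOption = false: take the privilege away entirely.
    rawStore.revokePrivileges(privBag, false);
    // grantOption = true: the grantee keeps SELECT but can no longer re-grant it.
    rawStore.revokePrivileges(privBag, true);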
Modified: hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java (original)
+++ hive/branches/cbo/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java Fri Jul 25 00:38:23 2014
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hive.metastore.txn;

-import junit.framework.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;

 import java.util.ArrayList;
@@ -200,7 +197,7 @@ public class TestCompactionTxnHandler {
     assertEquals(0, txnHandler.findReadyToClean().size());

     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    assertNull(rsp.getCompacts());
+    assertEquals(0, rsp.getCompactsSize());
   }

   @Test
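Why the TxnHandler and test hunks belong together: Thrift-generated response classes leave unset list fields null, while the generated getCompactsSize() returns 0 for a null list (standard Thrift accessor behavior, assumed here). Initializing the response with an empty list lets callers iterate without a null check:

    // Before: an uninitialized response surfaced null to callers.
    ShowCompactResponse bare = new ShowCompactResponse();
    assert bare.getCompacts() == null && bare.getCompactsSize() == 0;

    // After: callers can iterate directly, even with no compactions queued.
    ShowCompactResponse rsp =
        new ShowCompactResponse(new ArrayList<ShowCompactResponseElement>());
    for (ShowCompactResponseElement e : rsp.getCompacts()) {
      // loop body never runs for an empty queue
    }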
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Fri Jul 25 00:38:23 2014
@@ -101,6 +101,8 @@ import org.apache.hadoop.hive.ql.process
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext.CLIENT_TYPE;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
@@ -445,7 +447,7 @@ public class Driver implements CommandPr
       try {
         perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
-        doAuthorization(sem);
+        doAuthorization(sem, command);
       } catch (AuthorizationException authExp) {
         console.printError("Authorization failed:" + authExp.getMessage()
             + ". Use SHOW GRANT to get more details.");
@@ -483,15 +485,25 @@ public class Driver implements CommandPr
     }
   }

-  public static void doAuthorization(BaseSemanticAnalyzer sem)
+  /**
+   * Do authorization using post semantic analysis information in the semantic analyzer
+   * The original command is also passed so that authorization interface can provide
+   * more useful information in logs.
+   * @param sem
+   * @param command
+   * @throws HiveException
+   * @throws AuthorizationException
+   */
+  public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
       throws HiveException, AuthorizationException {
     HashSet<ReadEntity> inputs = sem.getInputs();
     HashSet<WriteEntity> outputs = sem.getOutputs();
     SessionState ss = SessionState.get();
     HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
+
     if (ss.isAuthorizationModeV2()) {
-      doAuthorizationV2(ss, op, inputs, outputs);
+      doAuthorizationV2(ss, op, inputs, outputs, command);
       return;
     }
     if (op == null) {
@@ -672,11 +684,20 @@ public class Driver implements CommandPr
   }

   private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
-      HashSet<WriteEntity> outputs) throws HiveException {
+      HashSet<WriteEntity> outputs, String command) throws HiveException {
+
+    HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
+
+    authzContextBuilder.setClientType(ss.isHiveServerQuery() ? CLIENT_TYPE.HIVESERVER2
+        : CLIENT_TYPE.HIVECLI);
+    authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
+    authzContextBuilder.setSessionString(ss.getSessionId());
+    authzContextBuilder.setCommandString(command);
+
     HiveOperationType hiveOpType = getHiveOperationType(op);
     List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
     List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs);
-    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs);
+
+    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build());
     return;
   }

@@ -703,18 +724,21 @@ public class Driver implements CommandPr
       //support for authorization on partitions needs to be added
       String dbname = null;
-      String tableURI = null;
+      String objName = null;
       switch(privObject.getType()){
      case DATABASE:
        dbname = privObject.getDatabase() == null ? null : privObject.getDatabase().getName();
        break;
      case TABLE:
        dbname = privObject.getTable() == null ? null : privObject.getTable().getDbName();
-       tableURI = privObject.getTable() == null ? null : privObject.getTable().getTableName();
+       objName = privObject.getTable() == null ? null : privObject.getTable().getTableName();
        break;
      case DFS_DIR:
      case LOCAL_DIR:
-       tableURI = privObject.getD();
+       objName = privObject.getD();
+       break;
+      case FUNCTION:
+        objName = privObject.getFunctionName();
+        break;
      case DUMMYPARTITION:
      case PARTITION:
@@ -724,7 +748,7 @@ public class Driver implements CommandPr
        throw new AssertionError("Unexpected object type");
      }
      HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
-      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, tableURI,
+      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
          actionType);
      hivePrivobjs.add(hPrivObject);
    }
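Context for the new command parameter: doAuthorizationV2 now bundles the raw command string plus client and session metadata into a HiveAuthzContext, so V2 authorizer plugins can produce meaningful audit logs. A sketch of the consuming side (the plugin class is hypothetical, and the context getter names are assumed to mirror the builder setters shown above):

    // Hypothetical authorizer plugin logging the context it receives.
    public void checkPrivileges(HiveOperationType opType, List<HivePrivilegeObject> inputs,
        List<HivePrivilegeObject> outputs, HiveAuthzContext context) throws HiveException {
      // Assumed getters corresponding to setCommandString()/setUserIpAddress().
      LOG.info("authorizing [" + context.getCommandString() + "] from "
          + context.getIpAddress() + ", op=" + opType);
      // ... actual privilege checks against inputs/outputs would go here ...
    }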
" + "Please update hive.jar.directory or hive.user.install.directory with a valid location", false), - UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254, + UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254, "Principal type GROUP is not supported in this authorization setting", "28000"), INVALID_TABLE_NAME(10255, "Invalid table name {0}", true), INSERT_INTO_IMMUTABLE_TABLE(10256, "Inserting into a non-empty immutable table is not allowed"), + UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL(10257, + "Resource type GLOBAL is not supported in this authorization setting", "28000"), + UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN(10258, + "Resource type COLUMN is not supported in this authorization setting", "28000"), TXNMGR_NOT_SPECIFIED(10260, "Transaction manager not specified correctly, " + "set hive.txn.manager"), Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1613335&r1=1613334&r2=1613335&view=diff ============================================================================== --- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original) +++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Fri Jul 25 00:38:23 2014 @@ -66,17 +66,12 @@ import org.apache.hadoop.hive.metastore. import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse; -import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege; -import org.apache.hadoop.hive.metastore.api.HiveObjectRef; -import org.apache.hadoop.hive.metastore.api.HiveObjectType; import org.apache.hadoop.hive.metastore.api.Index; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.metastore.api.PrincipalType; -import org.apache.hadoop.hive.metastore.api.PrivilegeBag; -import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo; import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.ShowCompactResponse; @@ -169,15 +164,13 @@ import org.apache.hadoop.hive.ql.plan.Un import org.apache.hadoop.hive.ql.plan.UnlockTableDesc; import org.apache.hadoop.hive.ql.plan.api.StageType; import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils; -import org.apache.hadoop.hive.ql.security.authorization.Privilege; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo; import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject; -import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer; import 
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -198,6 +191,7 @@ import org.apache.hadoop.hive.shims.Shim
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.stringtemplate.v4.ST;

 /**
@@ -452,7 +446,8 @@ public class DDLTask extends Task<DDLWor
       RevokeDesc revokeDesc = work.getRevokeDesc();
       if (revokeDesc != null) {
         return grantOrRevokePrivileges(revokeDesc.getPrincipals(), revokeDesc
-            .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null, false, false);
+            .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null,
+            revokeDesc.isGrantOption(), false);
       }

       ShowGrantDesc showGrantDesc = work.getShowGrantDesc();
@@ -491,14 +486,21 @@ public class DDLTask extends Task<DDLWor
         return exchangeTablePartition(db, alterTableExchangePartition);
       }
     } catch (Throwable e) {
-      setException(e);
-      LOG.error(stringifyException(e));
+      failed(e);
       return 1;
     }
     assert false;
     return 0;
   }

+  private void failed(Throwable e) {
+    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+      e = e.getCause();
+    }
+    setException(e);
+    LOG.error(stringifyException(e));
+  }
+
   private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
     ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
     if (conf == null) {
@@ -562,187 +564,46 @@ public class DDLTask extends Task<DDLWor
     return ret;
   }

-  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
-      throws HiveException {
-    try {
-      boolean grantRole = grantOrRevokeRoleDDL.getGrant();
-      List<PrincipalDesc> principals = grantOrRevokeRoleDDL.getPrincipalDesc();
-      List<String> roles = grantOrRevokeRoleDDL.getRoles();
-
-      if(SessionState.get().isAuthorizationModeV2()){
-        return grantOrRevokeRoleV2(grantOrRevokeRoleDDL);
-      }
-
-      for (PrincipalDesc principal : principals) {
-        String userName = principal.getName();
-        for (String roleName : roles) {
-          if (grantRole) {
-            db.grantRole(roleName, userName, principal.getType(),
-                grantOrRevokeRoleDDL.getGrantor(), grantOrRevokeRoleDDL
-                    .getGrantorType(), grantOrRevokeRoleDDL.isGrantOption());
-          } else {
-            db.revokeRole(roleName, userName, principal.getType(),
-                grantOrRevokeRoleDDL.isGrantOption());
-          }
-        }
-      }
-    } catch (Exception e) {
-      throw new HiveException(e);
+  private HiveAuthorizer getSessionAuthorizer() {
+    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    if (authorizer == null) {
+      authorizer = new HiveV1Authorizer(conf, db);
     }
-    return 0;
+    return authorizer;
   }

-  private int grantOrRevokeRoleV2(GrantRevokeRoleDDL grantOrRevokeRoleDDL) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
+      throws HiveException {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     //convert to the types needed for plugin api
     HivePrincipal grantorPrinc = null;
     if(grantOrRevokeRoleDDL.getGrantor() != null){
       grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
-          getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
+          AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
     }
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+    List<HivePrincipal> principals =
+        AuthorizationUtils.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
     List<String> roles = grantOrRevokeRoleDDL.getRoles();
-    if(grantOrRevokeRoleDDL.getGrant()){
-      authorizer.grantRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
-    }
-    else{
-      authorizer.revokeRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
+    boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
+    if (grantOrRevokeRoleDDL.getGrant()) {
+      authorizer.grantRole(principals, roles, grantOption, grantorPrinc);
+    } else {
+      authorizer.revokeRole(principals, roles, grantOption, grantorPrinc);
     }
     return 0;
   }

   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
-    if(SessionState.get().isAuthorizationModeV2()){
-      return showGrantsV2(showGrantDesc);
-    }
-
-    PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
-    PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
-    String principalName = principalDesc == null ? null : principalDesc.getName();
-    PrincipalType type = principalDesc == null ? null : principalDesc.getType();
-    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
-    try {
-      if (hiveObjectDesc == null) {
-        privs.addAll(db.showPrivilegeGrant(HiveObjectType.GLOBAL, principalName, type,
-            null, null, null, null));
-      } else if (hiveObjectDesc != null && hiveObjectDesc.getObject() == null) {
-        privs.addAll(db.showPrivilegeGrant(null, principalName, type, null, null, null, null));
-      } else {
-        String obj = hiveObjectDesc.getObject();
-        boolean notFound = true;
-        String dbName = null;
-        String tableName = null;
-        Table tableObj = null;
-        Database dbObj = null;
-
-        if (hiveObjectDesc.getTable()) {
-          String[] dbTab = splitTableName(obj);
-          dbName = dbTab[0];
-          tableName = dbTab[1];
-          dbObj = db.getDatabase(dbName);
-          tableObj = db.getTable(dbName, tableName);
-          notFound = (dbObj == null || tableObj == null);
-        } else {
-          dbName = hiveObjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          notFound = (dbObj == null);
-        }
-        if (notFound) {
-          throw new HiveException(obj + " can not be found");
-        }
-
-        String partName = null;
-        List<String> partValues = null;
-        if (hiveObjectDesc.getPartSpec() != null) {
-          partName = Warehouse
-              .makePartName(hiveObjectDesc.getPartSpec(), false);
-          partValues = Warehouse.getPartValuesFromPartName(partName);
-        }
-
-        if (!hiveObjectDesc.getTable()) {
-          // show database level privileges
-          privs.addAll(db.showPrivilegeGrant(HiveObjectType.DATABASE,
-              principalName, type, dbName, null, null, null));
-        } else {
-          if (showGrantDesc.getColumns() != null) {
-            // show column level privileges
-            for (String columnName : showGrantDesc.getColumns()) {
-              privs.addAll(db.showPrivilegeGrant(
-                  HiveObjectType.COLUMN, principalName,
-                  type, dbName, tableName, partValues,
-                  columnName));
-            }
-          } else if (hiveObjectDesc.getPartSpec() != null) {
-            // show partition level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.PARTITION, principalName, type,
-                dbName, tableName, partValues, null));
-          } else {
-            // show table level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.TABLE, principalName, type,
-                dbName, tableName, null, null));
-          }
-        }
-      }
-      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privs, testMode), showGrantDesc.getResFile());
-    } catch (FileNotFoundException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (IOException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw new HiveException(e);
-    }
-    return 0;
-  }
-
-  private static String[] splitTableName(String fullName) {
-    String[] dbTab = fullName.split("\\.");
-    String[] result = new String[2];
-    if (dbTab.length == 2) {
-      result[0] = dbTab[0];
-      result[1] = dbTab[1];
-    } else {
-      result[0] = SessionState.get().getCurrentDatabase();
-      result[1] = fullName;
-    }
-    return result;
-  }
-
-  private int showGrantsV2(ShowGrantDesc showGrantDesc) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     try {
       List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          getHivePrivilegeObject(showGrantDesc.getHiveObj())
-          );
-      List<HiveObjectPrivilege> privList = new ArrayList<HiveObjectPrivilege>();
-      for(HivePrivilegeInfo privInfo : privInfos){
-        HivePrincipal principal = privInfo.getPrincipal();
-        HivePrivilegeObject privObj = privInfo.getObject();
-        HivePrivilege priv = privInfo.getPrivilege();
-
-        PrivilegeGrantInfo grantInfo =
-            AuthorizationUtils.getThriftPrivilegeGrantInfo(priv, privInfo.getGrantorPrincipal(),
-                privInfo.isGrantOption(), privInfo.getGrantTime());
-
-        //only grantInfo is used
-        HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
-            AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
-            privObj.getTableViewURI(),null,null), principal.getName(),
-            AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
-        privList.add(thriftObjectPriv);
-      }
+          AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+          AuthorizationUtils.getHivePrivilegeObject(showGrantDesc.getHiveObj(),
+              showGrantDesc.getColumns()));
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privList, testMode), showGrantDesc.getResFile());
+      writeToFile(writeGrantInfo(privInfos, testMode), showGrantDesc.getResFile());
     } catch (IOException e) {
       throw new HiveException("Error in show grant statement", e);
     }
@@ -754,155 +615,15 @@ public class DDLTask extends Task<DDLWor
       String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant)
       throws HiveException {
-    if(SessionState.get().isAuthorizationModeV2()){
-      return grantOrRevokePrivilegesV2(principals, privileges, privSubjectDesc, grantor,
-          grantorType, grantOption, isGrant);
-    }
-
-    if (privileges == null || privileges.size() == 0) {
-      console.printError("No privilege found.");
-      return 1;
-    }
-
-    String dbName = null;
-    String tableName = null;
-    Table tableObj = null;
-    Database dbObj = null;
-
-    try {
-
-      if (privSubjectDesc != null) {
-        if (privSubjectDesc.getPartSpec() != null && isGrant) {
-          throw new HiveException("Grant does not support partition level.");
-        }
-        String obj = privSubjectDesc.getObject();
-
-        //get the db, table objects
-        if (privSubjectDesc.getTable()) {
-          String[] dbTable = Utilities.getDbTableName(obj);
-          dbName = dbTable[0];
-          tableName = dbTable[1];
-
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-          tableObj = db.getTable(dbName, tableName);
-          if (tableObj == null) {
-            throwNotFound("Table", obj);
-          }
-        } else {
-          dbName = privSubjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-        }
-      }
-
-      PrivilegeBag privBag = new PrivilegeBag();
-      if (privSubjectDesc == null) {
-        for (int idx = 0; idx < privileges.size(); idx++) {
-          Privilege priv = privileges.get(idx).getPrivilege();
-          if (privileges.get(idx).getColumns() != null
-              && privileges.get(idx).getColumns().size() > 0) {
-            throw new HiveException(
-                "For user-level privileges, column sets should be null. columns="
-                    + privileges.get(idx).getColumns().toString());
-          }
-
-          privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
-              HiveObjectType.GLOBAL, null, null, null, null), null, null,
-              new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType,
-                  grantOption)));
-        }
-      } else {
-        org.apache.hadoop.hive.metastore.api.Partition partObj = null;
-        List<String> partValues = null;
-        if (tableObj != null) {
-          if ((!tableObj.isPartitioned())
-              && privSubjectDesc.getPartSpec() != null) {
-            throw new HiveException(
-                "Table is not partitioned, but partition name is present: partSpec="
-                    + privSubjectDesc.getPartSpec().toString());
-          }
-
-          if (privSubjectDesc.getPartSpec() != null) {
-            partObj = db.getPartition(tableObj, privSubjectDesc.getPartSpec(),
-                false).getTPartition();
-            partValues = partObj.getValues();
-          }
-        }
-
-        for (PrivilegeDesc privDesc : privileges) {
-          List<String> columns = privDesc.getColumns();
-          Privilege priv = privDesc.getPrivilege();
-          if (columns != null && columns.size() > 0) {
-            if (!priv.supportColumnLevel()) {
-              throw new HiveException(priv.toString()
-                  + " does not support column level.");
-            }
-            if (privSubjectDesc == null || tableName == null) {
-              throw new HiveException(
-                  "For user-level/database-level privileges, column sets should be null. columns="
-                      + columns);
-            }
-            for (int i = 0; i < columns.size(); i++) {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.COLUMN, dbName, tableName,
-                      partValues, columns.get(i)), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          } else {
-            if (privSubjectDesc.getTable()) {
-              if (privSubjectDesc.getPartSpec() != null) {
-                privBag.addToPrivileges(new HiveObjectPrivilege(
-                    new HiveObjectRef(HiveObjectType.PARTITION, dbName,
-                        tableName, partValues, null), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              } else {
-                privBag
-                    .addToPrivileges(new HiveObjectPrivilege(
-                        new HiveObjectRef(HiveObjectType.TABLE, dbName,
-                            tableName, null, null), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              }
-            } else {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.DATABASE, dbName, null,
                      null, null), null, null, new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          }
-        }
-      }
-
-      for (PrincipalDesc principal : principals) {
-        for (int i = 0; i < privBag.getPrivileges().size(); i++) {
-          HiveObjectPrivilege objPrivs = privBag.getPrivileges().get(i);
-          objPrivs.setPrincipalName(principal.getName());
-          objPrivs.setPrincipalType(principal.getType());
-        }
-        if (isGrant) {
-          db.grantPrivileges(privBag);
-        } else {
-          db.revokePrivileges(privBag);
-        }
-      }
-    } catch (Exception e) {
-      console.printError("Error: " + e.getMessage());
-      return 1;
-    }
-
-    return 0;
-  }
-
-  private int grantOrRevokePrivilegesV2(List<PrincipalDesc> principals,
-      List<PrivilegeDesc> privileges, PrivilegeObjectDesc privSubjectDesc, String grantor,
-      PrincipalType grantorType, boolean grantOption, boolean isGrant) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();

     //Convert to object types used by the authorization plugin interface
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(principals);
-    List<HivePrivilege> hivePrivileges = getHivePrivileges(privileges);
-    HivePrivilegeObject hivePrivObject = getHivePrivilegeObject(privSubjectDesc);
-    HivePrincipal grantorPrincipal = new HivePrincipal(grantor, getHivePrincipalType(grantorType));
+    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(principals);
+    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(privileges);
+    HivePrivilegeObject hivePrivObject = AuthorizationUtils.getHivePrivilegeObject(privSubjectDesc, null);
+
+    HivePrincipal grantorPrincipal = new HivePrincipal(
+        grantor, AuthorizationUtils.getHivePrincipalType(grantorType));

     if(isGrant){
       authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
@@ -915,123 +636,8 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }

-  private HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
-      throws HiveException {
-    if(privSubjectDesc == null){
-      return new HivePrivilegeObject(null, null, null);
-    }
-    String [] dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
-    return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]);
-  }
-
-  private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
-    if(type == null){
-      return null;
-    }
-
-    switch(type){
-    case USER:
-      return HivePrincipalType.USER;
-    case ROLE:
-      return HivePrincipalType.ROLE;
-    case GROUP:
-      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
-    default:
-      //should not happen as we take care of all existing types
-      throw new AssertionError("Unsupported authorization type specified");
-    }
-  }
-
-  private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
-    if (privSubjectDesc.getObject() == null) {
-      return null;
-    }
-    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW : HivePrivilegeObjectType.DATABASE;
-  }
-
-  private List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
-    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
-    for(PrivilegeDesc privilege : privileges){
-      hivePrivileges.add(
-          new HivePrivilege(privilege.getPrivilege().toString(), privilege.getColumns()));
-    }
-    return hivePrivileges;
-  }
-
-  private List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
-    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
-    for(PrincipalDesc principal : principals){
-      hivePrincipals.add(getHivePrincipal(principal));
-    }
-    return hivePrincipals;
-  }
-
-  private HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
-    if (principal == null) {
-      return null;
-    }
-    return new HivePrincipal(principal.getName(),
-        AuthorizationUtils.getHivePrincipalType(principal.getType()));
-  }
-
-  private void throwNotFound(String objType, String objName) throws HiveException {
-    throw new HiveException(objType + " " + objName + " not found");
-  }
-
-  private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    if(SessionState.get().isAuthorizationModeV2()){
-      return roleDDLV2(roleDDLDesc);
-    }
-
-    DataOutputStream outStream = null;
-    RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
-    try {
-      if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
-        db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.DROP_ROLE)) {
-        db.dropRole(roleDDLDesc.getName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT)) {
-        boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-        List<RolePrincipalGrant> roleGrants = db.getRoleGrantInfoForPrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType());
-        writeToFile(writeRoleGrantsInfo(roleGrants, testMode), roleDDLDesc.getResFile());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLES)) {
-        List<String> roleNames = db.getAllRoleNames();
-        //sort the list to get sorted (deterministic) output (for ease of testing)
-        Collections.sort(roleNames);
-        Path resFile = new Path(roleDDLDesc.getResFile());
-        FileSystem fs = resFile.getFileSystem(conf);
-        outStream = fs.create(resFile);
-        for (String roleName : roleNames) {
-          outStream.writeBytes(roleName);
-          outStream.write(terminator);
-        }
-        outStream.close();
-        outStream = null;
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS)) {
-        throw new HiveException("Show role principals is not currently supported in "
-            + "authorization mode V1");
-      }
-      else {
-        throw new HiveException("Unkown role operation "
-            + operation.getOperationName());
-      }
-    } catch (HiveException e) {
-      console.printError("Error in role operation "
-          + operation.getOperationName() + " on role name "
-          + roleDDLDesc.getName() + ", error message " + e.getMessage());
-      return 1;
-    } catch (IOException e) {
-      LOG.info("role ddl exception: " + stringifyException(e));
-      return 1;
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-
-    return 0;
-  }
-
-  private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int roleDDL(RoleDDLDesc roleDDLDesc) throws Exception {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
     //call the appropriate hive authorizer function
     switch(operation){
@@ -1044,7 +650,7 @@ public class DDLTask extends Task<DDLWor
     case SHOW_ROLE_GRANT:
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
       List<HiveRoleGrant> roles = authorizer.getRoleGrantInfoForPrincipal(
-          new HivePrincipal(roleDDLDesc.getName(), getHivePrincipalType(roleDDLDesc.getPrincipalType())));
+          AuthorizationUtils.getHivePrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType()));
       writeToFile(writeRolesGrantedInfo(roles, testMode), roleDDLDesc.getResFile());
       break;
     case SHOW_ROLES:
@@ -2799,7 +2405,7 @@ public class DDLTask extends Task<DDLWor
       LOG.warn("show function: " + stringifyException(e));
       return 1;
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e.toString(), e);
     } finally {
       IOUtils.closeStream(outStream);
     }
@@ -3212,7 +2818,7 @@ public class DDLTask extends Task<DDLWor
         funcClass = functionInfo.getFunctionClass();
       }
       if (funcClass != null) {
-        desc = funcClass.getAnnotation(Description.class);
+        desc = AnnotationUtils.getAnnotation(funcClass, Description.class);
       }
       if (desc != null) {
         outStream.writeBytes(desc.value().replace("_FUNC_", funcName));
@@ -3488,7 +3094,7 @@ public class DDLTask extends Task<DDLWor
       // when column name is specified in describe table DDL, colPath will
      // will be table_name.column_name
      String colName = colPath.split("\\.")[1];
-      String[] dbTab = splitTableName(tableName);
+      String[] dbTab = Utilities.getDbTableName(tableName);
      List<String> colNames = new ArrayList<String>();
      colNames.add(colName.toLowerCase());
      if (null == part) {
@@ -3538,28 +3144,41 @@ public class DDLTask extends Task<DDLWor
     }
   }

-  static String writeGrantInfo(List<HiveObjectPrivilege> privileges, boolean testMode) {
+  static String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) {
     if (privileges == null || privileges.isEmpty()) {
       return "";
     }
     StringBuilder builder = new StringBuilder();
     //sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(privileges);
-
-    for (HiveObjectPrivilege privilege : privileges) {
-      HiveObjectRef resource = privilege.getHiveObject();
-      PrivilegeGrantInfo grantInfo = privilege.getGrantInfo();
+    Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() {
+      @Override
+      public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) {
+        int compare = o1.getObject().compareTo(o2.getObject());
+        if (compare == 0) {
+          compare = o1.getPrincipal().compareTo(o2.getPrincipal());
+        }
+        if (compare == 0) {
+          compare = o1.getPrivilege().compareTo(o2.getPrivilege());
+        }
+        return compare;
+      }
+    });
+
+    for (HivePrivilegeInfo privilege : privileges) {
+      HivePrincipal principal = privilege.getPrincipal();
+      HivePrivilegeObject resource = privilege.getObject();
+      HivePrincipal grantor = privilege.getGrantorPrincipal();

-      appendNonNull(builder, resource.getDbName(), true);
+      appendNonNull(builder, resource.getDbname(), true);
       appendNonNull(builder, resource.getObjectName());
-      appendNonNull(builder, resource.getPartValues());
-      appendNonNull(builder, resource.getColumnName());
-      appendNonNull(builder, privilege.getPrincipalName());
-      appendNonNull(builder, privilege.getPrincipalType());
-      appendNonNull(builder, grantInfo.getPrivilege());
-      appendNonNull(builder, grantInfo.isGrantOption());
-      appendNonNull(builder, testMode ? -1 : grantInfo.getCreateTime() * 1000L);
-      appendNonNull(builder, grantInfo.getGrantor());
+      appendNonNull(builder, resource.getPartKeys());
+      appendNonNull(builder, resource.getColumns());
+      appendNonNull(builder, principal.getName());
+      appendNonNull(builder, principal.getType());
+      appendNonNull(builder, privilege.getPrivilege().getName());
+      appendNonNull(builder, privilege.isGrantOption());
+      appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L);
+      appendNonNull(builder, grantor.getName());
     }
     return builder.toString();
   }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java Fri Jul 25 00:38:23 2014
@@ -32,8 +32,8 @@ import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -44,20 +44,20 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -69,7 +69,7 @@ import org.json.JSONObject;
 public class ExplainTask extends Task<ExplainWork> implements Serializable {
   private static final long serialVersionUID = 1L;
   public static final String EXPL_COLUMN_NAME = "Explain";
-  private Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
+  private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
   private boolean isLogical = false;

   public ExplainTask() {
@@ -143,7 +143,9 @@ public class ExplainTask extends Task<Ex
     }

     if (work.getParseContext() != null) {
-      out.print("LOGICAL PLAN:");
+      if (out != null) {
+        out.print("LOGICAL PLAN:");
+      }
       JSONObject jsonPlan = outputMap(work.getParseContext().getTopOps(), true,
           out, jsonOutput, work.getExtended(), 0);
       if (out != null) {
@@ -167,7 +169,7 @@ public class ExplainTask extends Task<Ex
   public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<?>> tasks,
       Task<?> fetchTask, boolean jsonOutput, boolean isExtended,
       boolean appendTaskType) throws Exception {
-
+
     // If the user asked for a formatted output, dump the json output
     // in the output stream
     JSONObject outJSONObject = new JSONObject();
@@ -335,11 +337,9 @@ public class ExplainTask extends Task<Ex
     }

     final List<String> exceptions = new ArrayList<String>();
-
     Object delegate = SessionState.get().getActiveAuthorizer();
     if (delegate != null) {
       Class itface = SessionState.get().getAuthorizerInterface();
-
       Object authorizer = AuthorizationFactory.create(delegate, itface,
           new AuthorizationFactory.AuthorizationExceptionHandler() {
             public void exception(Exception exception) {
@@ -349,7 +349,7 @@ public class ExplainTask extends Task<Ex

       SessionState.get().setActiveAuthorizer(authorizer);
       try {
-        Driver.doAuthorization(analyzer);
+        Driver.doAuthorization(analyzer, "");
       } finally {
         SessionState.get().setActiveAuthorizer(delegate);
       }
@@ -399,7 +399,7 @@ public class ExplainTask extends Task<Ex
         }
       }
       else if (ent.getValue() instanceof List) {
-        if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty() 
+        if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty()
             && ((List<?>)ent.getValue()).get(0) != null
             && ((List<?>)ent.getValue()).get(0) instanceof TezWork.Dependency) {
           if (out != null) {
@@ -528,7 +528,7 @@ public class ExplainTask extends Task<Ex
   private JSONObject outputPlan(Serializable work, PrintStream out,
       boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
     // Check if work has an explain annotation
-    Annotation note = work.getClass().getAnnotation(Explain.class);
+    Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);

     String keyJSONObject = null;

@@ -587,7 +587,7 @@ public class ExplainTask extends Task<Ex
     for (Method m : methods) {
       int prop_indents = jsonOutput ? 0 : indent + 2;
-      note = m.getAnnotation(Explain.class);
+      note = AnnotationUtils.getAnnotation(m, Explain.class);

       if (note instanceof Explain) {
         Explain xpl_note = (Explain) note;
@@ -908,6 +908,7 @@ public class ExplainTask extends Task<Ex
    *
    */
   public class MethodComparator implements Comparator<Method> {
+    @Override
     public int compare(Method m1, Method m2) {
       return m1.getName().compareTo(m2.getName());
     }
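A recurring substitution in this commit (here and in DDLTask, FunctionInfo, FunctionRegistry, and the vectorization classes below): direct Class.getAnnotation()/Method.getAnnotation() calls are routed through org.apache.hive.common.util.AnnotationUtils. The utility's body is not part of this diff; the sketch below shows one plausible shape for such a wrapper, assuming the motivation is serializing annotation lookups around known JDK thread-safety issues in annotation parsing:

    import java.lang.annotation.Annotation;
    import java.lang.reflect.AnnotatedElement;

    // Hypothetical stand-in for AnnotationUtils, not the actual Hive source.
    public final class AnnotationLookup {
      private AnnotationLookup() {}

      // One synchronized choke point for all annotation reads.
      public static synchronized <A extends Annotation> A getAnnotation(
          AnnotatedElement element, Class<A> annotationClass) {
        return element.getAnnotation(annotationClass);
      }
    }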
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java Fri Jul 25 00:38:23 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction;
+import org.apache.hive.common.util.AnnotationUtils;

 /**
  * FunctionInfo.
@@ -74,7 +75,8 @@ public class FunctionInfo implements Com
   {
     this.displayName = displayName;
     this.tableFunctionResolver = tFnCls;
-    PartitionTableFunctionDescription def = tableFunctionResolver.getAnnotation(PartitionTableFunctionDescription.class);
+    PartitionTableFunctionDescription def = AnnotationUtils.getAnnotation(
+        tableFunctionResolver, PartitionTableFunctionDescription.class);
     this.isNative = (def == null) ? false : def.isInternal();
     this.isInternalTableFunction = isNative;
   }
@@ -136,7 +138,7 @@ public class FunctionInfo implements Com
   }

   /**
-   * Get the display name for this function. This should be transfered into
+   * Get the display name for this function. This should be transferred into
    * exprNodeGenericUDFDesc, and will be used as the first parameter to
    * GenericUDF.getDisplayName() call, instead of hard-coding the function name.
    * This will solve the problem of displaying only one name when a udf is

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java Fri Jul 25 00:38:23 2014
@@ -144,6 +144,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
@@ -1613,14 +1614,14 @@ public final class FunctionRegistry {
       // the deterministic annotation declares
       return false;
     }
-    UDFType genericUDFType = genericUDF.getClass().getAnnotation(UDFType.class);
+    UDFType genericUDFType = AnnotationUtils.getAnnotation(genericUDF.getClass(), UDFType.class);
     if (genericUDFType != null && genericUDFType.deterministic() == false) {
       return false;
     }

     if (genericUDF instanceof GenericUDFBridge) {
       GenericUDFBridge bridge = (GenericUDFBridge) (genericUDF);
-      UDFType bridgeUDFType = bridge.getUdfClass().getAnnotation(UDFType.class);
+      UDFType bridgeUDFType = AnnotationUtils.getAnnotation(bridge.getUdfClass(), UDFType.class);
       if (bridgeUDFType != null && bridgeUDFType.deterministic() == false) {
         return false;
       }
@@ -1638,14 +1639,14 @@ public final class FunctionRegistry {
    * Returns whether a GenericUDF is stateful or not.
    */
   public static boolean isStateful(GenericUDF genericUDF) {
-    UDFType genericUDFType = genericUDF.getClass().getAnnotation(UDFType.class);
+    UDFType genericUDFType = AnnotationUtils.getAnnotation(genericUDF.getClass(), UDFType.class);
     if (genericUDFType != null && genericUDFType.stateful()) {
       return true;
     }

     if (genericUDF instanceof GenericUDFBridge) {
       GenericUDFBridge bridge = (GenericUDFBridge) genericUDF;
-      UDFType bridgeUDFType = bridge.getUdfClass().getAnnotation(UDFType.class);
+      UDFType bridgeUDFType = AnnotationUtils.getAnnotation(bridge.getUdfClass(), UDFType.class);
       if (bridgeUDFType != null && bridgeUDFType.stateful()) {
         return true;
       }
@@ -1884,7 +1885,7 @@ public final class FunctionRegistry {
   /**
    * Both UDF and UDAF functions can imply order for analytical functions
    *
-   * @param name
+   * @param functionName
    *          name of function
    * @return true if a GenericUDF or GenericUDAF exists for this name and implyOrder is true, false
    *         otherwise.
@@ -1894,7 +1895,8 @@ public final class FunctionRegistry {
     FunctionInfo info = getFunctionInfo(functionName);
     if (info != null) {
       if (info.isGenericUDF()) {
-        UDFType type = info.getGenericUDF().getClass().getAnnotation(UDFType.class);
+        UDFType type =
+            AnnotationUtils.getAnnotation(info.getGenericUDF().getClass(), UDFType.class);
         if (type != null) {
           return type.impliesOrder();
         }
@@ -1961,7 +1963,8 @@ public final class FunctionRegistry {
     FunctionInfo info = getFunctionInfo(name);
     GenericUDAFResolver res = info.getGenericUDAFResolver();
     if (res != null){
-      WindowFunctionDescription desc = res.getClass().getAnnotation(WindowFunctionDescription.class);
+      WindowFunctionDescription desc =
+          AnnotationUtils.getAnnotation(res.getClass(), WindowFunctionDescription.class);
       if (desc != null){
         return desc.rankingFunction();
       }
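For reference, the UDFType annotation consulted in these FunctionRegistry hunks is what UDF authors use to opt out of constant folding and declare per-row state; a UDF declared as in the sketch below would make isDeterministic() return false and isStateful() return true (the class itself is illustrative, not part of this commit):

    import org.apache.hadoop.hive.ql.exec.Description;
    import org.apache.hadoop.hive.ql.udf.UDFType;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;

    // Illustrative only: a per-row counter must be neither folded nor cached.
    @Description(name = "row_counter", value = "_FUNC_() - running count per task")
    @UDFType(deterministic = false, stateful = true)
    public abstract class RowCounterUDF extends GenericUDF {
      // initialize()/evaluate()/getDisplayString() omitted; the annotations
      // are the only part FunctionRegistry inspects here.
    }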
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java Fri Jul 25 00:38:23 2014
@@ -3094,7 +3094,8 @@ public final class Utilities {
       PartitionDesc partDesc = work.getPathToPartitionInfo().get(strPath);
       boolean nonNative = partDesc.getTableDesc().isNonNative();
       boolean oneRow = partDesc.getInputFileFormatClass() == OneNullRowInputFormat.class;
-      Properties props = partDesc.getProperties();
+      Properties props = SerDeUtils.createOverlayedProperties(
+          partDesc.getTableDesc().getProperties(), partDesc.getProperties());
       Class<? extends HiveOutputFormat> outFileFormat = partDesc.getOutputFileFormatClass();

       if (nonNative) {

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/WindowFunctionInfo.java Fri Jul 25 00:38:23 2014
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;

 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
+import org.apache.hive.common.util.AnnotationUtils;

 @SuppressWarnings("deprecation")
 public class WindowFunctionInfo implements CommonFunctionInfo
@@ -33,7 +34,8 @@ public class WindowFunctionInfo implemen
     assert fInfo.isGenericUDAF();
     this.fInfo = fInfo;
     Class<? extends GenericUDAFResolver> wfnCls = fInfo.getGenericUDAFResolver().getClass();
-    WindowFunctionDescription def = wfnCls.getAnnotation(WindowFunctionDescription.class);
+    WindowFunctionDescription def =
+        AnnotationUtils.getAnnotation(wfnCls, WindowFunctionDescription.class);
     if ( def != null) {
       supportsWindow = def.supportsWindow();

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorColumnAssignFactory.java Fri Jul 25 00:38:23 2014
@@ -393,7 +393,7 @@ public class VectorColumnAssignFactory {
           else {
             BytesWritable bw = (BytesWritable) val;
             byte[] bytes = bw.getBytes();
-            assignBytes(bytes, 0, bytes.length, destIndex);
+            assignBytes(bytes, 0, bw.getLength(), destIndex);
           }
         }
       }.init(outputBatch, (BytesColumnVector) destCol);
@@ -408,7 +408,7 @@ public class VectorColumnAssignFactory {
           else {
             Text bw = (Text) val;
             byte[] bytes = bw.getBytes();
-            assignBytes(bytes, 0, bytes.length, destIndex);
+            assignBytes(bytes, 0, bw.getLength(), destIndex);
           }
         }
       }.init(outputBatch, (BytesColumnVector) destCol);

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorExpressionDescriptor.java Fri Jul 25 00:38:23 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v

 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hive.common.util.AnnotationUtils;

 /**
  * Describes a vector expression and encapsulates the {@link Mode}, number of arguments,
@@ -219,7 +220,8 @@ public class VectorExpressionDescriptor
   }

   public Class<?> getVectorExpressionClass(Class<?> udf, Descriptor descriptor) throws HiveException {
-    VectorizedExpressions annotation = udf.getAnnotation(VectorizedExpressions.class);
+    VectorizedExpressions annotation =
+        AnnotationUtils.getAnnotation(udf, VectorizedExpressions.class);
     if (annotation == null || annotation.value() == null) {
       return null;
     }
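The two VectorColumnAssignFactory hunks above fix a classic Hadoop pitfall: BytesWritable.getBytes() and Text.getBytes() return the backing buffer, which can be longer than the valid payload, so the valid length must come from getLength(). A self-contained illustration:

    import org.apache.hadoop.io.BytesWritable;

    public class GetBytesPitfall {
      public static void main(String[] args) {
        BytesWritable bw = new BytesWritable(new byte[] {1, 2, 3, 4, 5, 6, 7, 8});
        bw.setSize(3); // shrink the logical size; the backing array keeps 8 slots

        System.out.println(bw.getBytes().length); // 8: buffer, including stale bytes
        System.out.println(bw.getLength());       // 3: actual valid data
      }
    }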
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Fri Jul 25 00:38:23 2014
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

 /**
  * Context class for vectorization execution.
@@ -393,13 +394,30 @@ public class VectorizationContext {
     List<ExprNodeDesc> childrenWithCasts = new ArrayList<ExprNodeDesc>();
     boolean atleastOneCastNeeded = false;
-    for (ExprNodeDesc child : children) {
-      ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
-      if (castExpression != null) {
-        atleastOneCastNeeded = true;
-        childrenWithCasts.add(castExpression);
-      } else {
-        childrenWithCasts.add(child);
+    if (genericUDF instanceof GenericUDFElt) {
+      int i = 0;
+      for (ExprNodeDesc child : children) {
+        TypeInfo castType = commonType;
+        if (i++ == 0) {
+          castType = isIntFamily(child.getTypeString()) ? child.getTypeInfo() : TypeInfoFactory.intTypeInfo;
+        }
+        ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, castType);
+        if (castExpression != null) {
+          atleastOneCastNeeded = true;
+          childrenWithCasts.add(castExpression);
+        } else {
+          childrenWithCasts.add(child);
+        }
+      }
+    } else {
+      for (ExprNodeDesc child : children) {
+        ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
+        if (castExpression != null) {
+          atleastOneCastNeeded = true;
+          childrenWithCasts.add(castExpression);
+        } else {
+          childrenWithCasts.add(child);
+        }
       }
     }
     if (atleastOneCastNeeded) {
@@ -484,7 +502,7 @@ public class VectorizationContext {
     } else {

       // Casts to exact types including long to double etc. are needed in some special cases.
-      if (udf instanceof GenericUDFCoalesce) {
+      if (udf instanceof GenericUDFCoalesce || udf instanceof GenericUDFElt) {
         GenericUDF genericUdf = getGenericUDFForCast(castType);
         List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
         children.add(child);
@@ -896,6 +914,10 @@ public class VectorizationContext {

       // Coalesce is a special case because it can take variable number of arguments.
       return getCoalesceExpression(childExpr, returnType);
+    } else if (udf instanceof GenericUDFElt) {
+
+      // Coalesce is a special case because it can take variable number of arguments.
+      return getEltExpression(childExpr, returnType);
     } else if (udf instanceof GenericUDFBridge) {
       VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf,
           childExpr, mode, returnType);
@@ -948,6 +970,33 @@ public class VectorizationContext {
     }
   }

+  private VectorExpression getEltExpression(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+      throws HiveException {
+    int[] inputColumns = new int[childExpr.size()];
+    VectorExpression[] vectorChildren = null;
+    try {
+      vectorChildren = getVectorExpressions(childExpr, Mode.PROJECTION);
+
+      int i = 0;
+      for (VectorExpression ve : vectorChildren) {
+        inputColumns[i++] = ve.getOutputColumn();
+      }
+
+      int outColumn = ocm.allocateOutputColumn(getNormalizedTypeName(returnType.getTypeName()));
+      VectorElt vectorElt = new VectorElt(inputColumns, outColumn);
+      vectorElt.setOutputType(returnType.getTypeName());
+      vectorElt.setChildExpressions(vectorChildren);
+      return vectorElt;
+    } finally {
+      // Free the output columns of the child expressions.
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Fri Jul 25 00:38:23 2014
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.serde2.typ
 import org.apache.hadoop.hive.serde2.typeinfo.HiveDecimalUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
  * Context class for vectorization execution.
@@ -393,13 +394,30 @@ public class VectorizationContext {
     List<ExprNodeDesc> childrenWithCasts = new ArrayList<ExprNodeDesc>();
     boolean atleastOneCastNeeded = false;
-    for (ExprNodeDesc child : children) {
-      ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
-      if (castExpression != null) {
-        atleastOneCastNeeded = true;
-        childrenWithCasts.add(castExpression);
-      } else {
-        childrenWithCasts.add(child);
+    if (genericUDF instanceof GenericUDFElt) {
+      int i = 0;
+      for (ExprNodeDesc child : children) {
+        TypeInfo castType = commonType;
+        if (i++ == 0) {
+          castType = isIntFamily(child.getTypeString()) ? child.getTypeInfo() : TypeInfoFactory.intTypeInfo;
+        }
+        ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, castType);
+        if (castExpression != null) {
+          atleastOneCastNeeded = true;
+          childrenWithCasts.add(castExpression);
+        } else {
+          childrenWithCasts.add(child);
+        }
+      }
+    } else {
+      for (ExprNodeDesc child : children) {
+        ExprNodeDesc castExpression = getImplicitCastExpression(genericUDF, child, commonType);
+        if (castExpression != null) {
+          atleastOneCastNeeded = true;
+          childrenWithCasts.add(castExpression);
+        } else {
+          childrenWithCasts.add(child);
+        }
       }
     }
     if (atleastOneCastNeeded) {
@@ -484,7 +502,7 @@ public class VectorizationContext {
     } else {
 
       // Casts to exact types including long to double etc. are needed in some special cases.
-      if (udf instanceof GenericUDFCoalesce) {
+      if (udf instanceof GenericUDFCoalesce || udf instanceof GenericUDFElt) {
         GenericUDF genericUdf = getGenericUDFForCast(castType);
         List<ExprNodeDesc> children = new ArrayList<ExprNodeDesc>();
         children.add(child);
@@ -896,6 +914,10 @@ public class VectorizationContext {
 
       // Coalesce is a special case because it can take variable number of arguments.
       return getCoalesceExpression(childExpr, returnType);
+    } else if (udf instanceof GenericUDFElt) {
+
+      // Elt is a special case because it can take variable number of arguments.
+      return getEltExpression(childExpr, returnType);
     } else if (udf instanceof GenericUDFBridge) {
       VectorExpression v = getGenericUDFBridgeVectorExpression((GenericUDFBridge) udf, childExpr,
           mode, returnType);
@@ -948,6 +970,33 @@ public class VectorizationContext {
     }
   }
 
+  private VectorExpression getEltExpression(List<ExprNodeDesc> childExpr, TypeInfo returnType)
+      throws HiveException {
+    int[] inputColumns = new int[childExpr.size()];
+    VectorExpression[] vectorChildren = null;
+    try {
+      vectorChildren = getVectorExpressions(childExpr, Mode.PROJECTION);
+
+      int i = 0;
+      for (VectorExpression ve : vectorChildren) {
+        inputColumns[i++] = ve.getOutputColumn();
+      }
+
+      int outColumn = ocm.allocateOutputColumn(getNormalizedTypeName(returnType.getTypeName()));
+      VectorElt vectorElt = new VectorElt(inputColumns, outColumn);
+      vectorElt.setOutputType(returnType.getTypeName());
+      vectorElt.setChildExpressions(vectorChildren);
+      return vectorElt;
+    } finally {
+      // Free the output columns of the child expressions.
+      if (vectorChildren != null) {
+        for (VectorExpression v : vectorChildren) {
+          ocm.freeOutputColumn(v.getOutputColumn());
+        }
+      }
+    }
+  }
+
   /**
   * Create a filter or boolean-valued expression for column IN ( <list-of-constants> )
   */
@@ -1063,10 +1112,9 @@ public class VectorizationContext {
     ExprNodeDesc child = childExpr.get(0);
     String inputType = childExpr.get(0).getTypeString();
     if (child instanceof ExprNodeConstantDesc) {
-      // Return a constant vector expression
-      Object constantValue = ((ExprNodeConstantDesc) child).getValue();
-      Decimal128 decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
-      return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION);
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
     }
     if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToDecimal.class, childExpr, Mode.PROJECTION, returnType);
@@ -1083,49 +1131,15 @@ public class VectorizationContext {
     throw new HiveException("Unhandled cast input type: " + inputType);
   }
 
-  private Decimal128 castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException {
-    PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
-    String typename = type.getTypeName();
-    Decimal128 d = new Decimal128();
-    int scale = HiveDecimalUtils.getScaleForType(ptinfo);
-    switch (ptinfo.getPrimitiveCategory()) {
-    case FLOAT:
-      float floatVal = ((Float) scalar).floatValue();
-      d.update(floatVal, (short) scale);
-      break;
-    case DOUBLE:
-      double doubleVal = ((Double) scalar).doubleValue();
-      d.update(doubleVal, (short) scale);
-      break;
-    case BYTE:
-      byte byteVal = ((Byte) scalar).byteValue();
-      d.update(byteVal, (short) scale);
-      break;
-    case SHORT:
-      short shortVal = ((Short) scalar).shortValue();
-      d.update(shortVal, (short) scale);
-      break;
-    case INT:
-      int intVal = ((Integer) scalar).intValue();
-      d.update(intVal, (short) scale);
-      break;
-    case LONG:
-      long longVal = ((Long) scalar).longValue();
-      d.update(longVal, (short) scale);
-      break;
-    case DECIMAL:
-      HiveDecimal decimalVal = (HiveDecimal) scalar;
-      d.update(decimalVal.unscaledValue(), (short) scale);
-      break;
-    default:
-      throw new HiveException("Unsupported type "+typename+" for cast to Decimal128");
-    }
-    return d;
-  }
-
   private VectorExpression getCastToString(List<ExprNodeDesc> childExpr, TypeInfo returnType)
       throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
     String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
     if (inputType.equals("boolean")) {
       // Boolean must come before the integer family. It's a special case.
       return createVectorExpression(CastBooleanToStringViaLongToString.class, childExpr, Mode.PROJECTION, null);
@@ -1145,7 +1159,13 @@ public class VectorizationContext {
 
   private VectorExpression getCastToDoubleExpression(Class<?> udf, List<ExprNodeDesc> childExpr,
       TypeInfo returnType) throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
     String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
     if (isIntFamily(inputType)) {
       return createVectorExpression(CastLongToDouble.class, childExpr, Mode.PROJECTION, returnType);
     } else if (inputType.equals("timestamp")) {
@@ -1163,7 +1183,13 @@ public class VectorizationContext {
 
   private VectorExpression getCastToBoolean(List<ExprNodeDesc> childExpr)
       throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
     String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
     // Long and double are handled using descriptors, string needs to be specially handled.
     if (inputType.equals("string")) {
       // string casts to false if it is 0 characters long, otherwise true
@@ -1184,7 +1210,13 @@ public class VectorizationContext {
 
   private VectorExpression getCastToLongExpression(List<ExprNodeDesc> childExpr)
       throws HiveException {
+    ExprNodeDesc child = childExpr.get(0);
     String inputType = childExpr.get(0).getTypeString();
+    if (child instanceof ExprNodeConstantDesc) {
+      // Don't do constant folding here. Wait until the optimizer is changed to do it.
+      // Family of related JIRAs: HIVE-7421, HIVE-7422, and HIVE-7424.
+      return null;
+    }
     // Float family, timestamp are handled via descriptor based lookup, int family needs
     // special handling.
     if (isIntFamily(inputType)) {
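Note on the GenericUDFElt changes above: ELT(n, v1, v2, ...) returns its n-th value argument, so the first child must stay in the int family (or be cast to int) while only the remaining children are cast to the common type; folding the index into the common string type would break evaluation. The scalar semantics the vectorized VectorElt has to reproduce, in an illustrative non-Hive form:

    // MySQL-style ELT: 1-based index; NULL when the index is NULL or out of range.
    public class EltDemo {
      static String elt(Integer index, String... values) {
        if (index == null || index < 1 || index > values.length) {
          return null;
        }
        return values[index - 1];
      }

      public static void main(String[] args) {
        System.out.println(elt(2, "hive", "hadoop"));  // hadoop
        System.out.println(elt(9, "hive", "hadoop"));  // null
      }
    }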
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Fri Jul 25 00:38:23 2014
@@ -40,7 +40,7 @@ public class Entity implements Serializa
    * The type of the entity.
    */
   public static enum Type {
-    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
+    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION
   }
 
   /**
@@ -64,11 +64,17 @@ public class Entity implements Serializa
   private Partition p;
 
   /**
-   * The directory if this is a directory.
+   * The directory if this is a directory
    */
   private String d;
 
   /**
+   * An object that is represented as a String.
+   * Currently used for functions.
+   */
+  private String stringObject;
+
+  /**
    * This is derived from t and p, but we need to serialize this field to make
    * sure Entity.hashCode() does not need to recursively read into t and p.
    */
@@ -136,6 +142,21 @@ public class Entity implements Serializa
     this.d = d;
   }
 
+  public String getFunctionName() {
+    if (typ == Type.FUNCTION) {
+      return stringObject;
+    }
+    return null;
+  }
+
+  public void setFunctionName(String funcName) {
+    if (typ != Type.FUNCTION) {
+      throw new IllegalArgumentException(
+          "setFunctionName() can't be called on an entity whose type is not " + Type.FUNCTION);
+    }
+    this.stringObject = funcName;
+  }
+
   /**
    * Only used by serialization.
    */
@@ -210,6 +231,24 @@ public class Entity implements Serializa
   }
 
   /**
+   * Create an entity representing an object with the given name, database namespace, and type.
+   * @param database - database namespace
+   * @param strObj - object name as string
+   * @param type - the entity type; this constructor currently supports only FUNCTION
+   */
+  public Entity(Database database, String strObj, Type type) {
+    if (type != Type.FUNCTION) {
+      throw new IllegalArgumentException("This constructor is supported only for type:"
+          + Type.FUNCTION);
+    }
+    this.database = database;
+    this.stringObject = strObj;
+    this.typ = type;
+    this.complete = true;
+    name = computeName();
+  }
+
+  /**
    * Get the parameter map of the Entity.
    */
   public Map<String, String> getParameters() {
@@ -293,6 +332,8 @@ public class Entity implements Serializa
       return t.getDbName() + "@" + t.getTableName() + "@" + p.getName();
     case DUMMYPARTITION:
       return p.getName();
+    case FUNCTION:
+      return stringObject;
     default:
       return d;
     }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Fri Jul 25 00:38:23 2014
@@ -82,6 +82,19 @@ public class WriteEntity extends Entity
   }
 
   /**
+   * Constructor for objects represented as String.
+   * Currently applicable only for function names.
+   * @param db
+   * @param objName
+   * @param type
+   * @param writeType
+   */
+  public WriteEntity(Database db, String objName, Type type, WriteType writeType) {
+    super(db, objName, type);
+    this.writeType = writeType;
+  }
+
+  /**
    * Constructor for a partition.
    *
    * @param p
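Note on the FUNCTION entity type added to Entity and WriteEntity: function DDL can now surface the affected function to hooks and authorization providers as a first-class output, keyed by name rather than by table or partition metadata. A hypothetical usage sketch under the new constructors (the WriteType value and the surrounding plumbing are assumptions, not part of this diff):

    import org.apache.hadoop.hive.metastore.api.Database;
    import org.apache.hadoop.hive.ql.hooks.Entity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    public class FunctionEntityExample {
      // How a CREATE FUNCTION task might record the function it creates
      // so that hooks and authorization checks can see it.
      static WriteEntity functionOutput(Database db, String qualifiedName) {
        return new WriteEntity(db, qualifiedName, Entity.Type.FUNCTION,
            WriteEntity.WriteType.DDL_NO_LOCK);
      }
    }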
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java Fri Jul 25 00:38:23 2014
@@ -108,7 +108,8 @@ public final class FileDump {
         buf.append(i);
         buf.append(": ");
         buf.append(encoding.getKind());
-        if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY) {
+        if (encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY ||
+            encoding.getKind() == OrcProto.ColumnEncoding.Kind.DICTIONARY_V2) {
           buf.append("[");
           buf.append(encoding.getDictionarySize());
           buf.append("]");

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/io/orc/RecordReaderImpl.java Fri Jul 25 00:38:23 2014
@@ -50,6 +50,7 @@ import org.apache.hadoop.hive.ql.exec.ve
 import org.apache.hadoop.hive.ql.io.sarg.PredicateLeaf;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument.TruthValue;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -2404,6 +2405,10 @@ class RecordReaderImpl implements Record
         return Double.valueOf(predObj.toString());
       }
     } else if (statsObj instanceof String) {
+      // Ex: a predicate like d = DATE '1970-02-01' arrives as an ExprNodeConstantDesc.
+      if (predObj instanceof ExprNodeConstantDesc) {
+        return ((ExprNodeConstantDesc) predObj).getValue().toString();
+      }
       return predObj.toString();
     } else if (statsObj instanceof HiveDecimal) {
       if (predObj instanceof Long) {
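Note on the RecordReaderImpl hunk: ORC predicate pushdown compares each predicate literal against the column's row-group statistics, and string statistics need the literal in string form; a DATE literal reaches this code still wrapped in an ExprNodeConstantDesc, so it is unwrapped before the comparison. The min/max check this feeds is, in essence (illustrative helper, names assumed):

    public class SargDemo {
      // A row group can only contain `literal` if min <= literal <= max;
      // otherwise the reader skips it without decoding any rows.
      static boolean rangeMightMatch(String min, String max, String literal) {
        return min.compareTo(literal) <= 0 && max.compareTo(literal) >= 0;
      }

      public static void main(String[] args) {
        System.out.println(rangeMightMatch("1970-01-01", "1970-01-31", "1970-02-01"));  // false
      }
    }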
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Fri Jul 25 00:38:23 2014
@@ -1136,10 +1136,10 @@ public class Hive {
    * @return true on success
    * @throws HiveException
    */
-  public boolean revokePrivileges(PrivilegeBag privileges)
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
       throws HiveException {
     try {
-      return getMSC().revoke_privileges(privileges);
+      return getMSC().revoke_privileges(privileges, grantOption);
     } catch (Exception e) {
       throw new HiveException(e);
     }

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java Fri Jul 25 00:38:23 2014
@@ -12,10 +12,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.conf.HiveConf;;
-import org.apache.hadoop.hive.metastore.HiveMetaHook;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
@@ -176,7 +173,7 @@ public class SessionHiveMetaStoreClient
     return tables;
   }
 
-  
+
   @Override
   public boolean tableExists(String databaseName, String tableName) throws MetaException,
       TException, UnknownDBException {
@@ -334,7 +331,7 @@ public class SessionHiveMetaStoreClient
           " is not writable by " + conf.getUser());
       }
     } catch (IOException err) {
-      MetaException metaException = 
+      MetaException metaException =
           new MetaException("Error checking temp table path for " + table.getTableName());
       metaException.initCause(err);
       throw metaException;

Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SimpleFetchOptimizer.java Fri Jul 25 00:38:23 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.optimizer;
 
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -27,6 +28,7 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.ContentSummary;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
@@ -106,9 +108,9 @@ public class SimpleFetchOptimizer implem
         pctx.getConf(), HiveConf.ConfVars.HIVEFETCHTASKCONVERSION);
     boolean aggressive = "more".equals(mode);
+    final int limit = pctx.getQB().getParseInfo().getOuterQueryLimit();
     FetchData fetch = checkTree(aggressive, pctx, alias, source);
-    if (fetch != null && checkThreshold(fetch, pctx)) {
-      int limit = pctx.getQB().getParseInfo().getOuterQueryLimit();
+    if (fetch != null && checkThreshold(fetch, limit, pctx)) {
       FetchWork fetchWork = fetch.convertToWork();
       FetchTask fetchTask = (FetchTask) TaskFactory.get(fetchWork, pctx.getConf());
       fetchWork.setSink(fetch.completed(pctx, fetchWork));
@@ -119,7 +121,10 @@ public class SimpleFetchOptimizer implem
     return null;
   }
 
-  private boolean checkThreshold(FetchData data, ParseContext pctx) throws Exception {
+  private boolean checkThreshold(FetchData data, int limit, ParseContext pctx) throws Exception {
+    if (limit > 0 && data.hasOnlyPruningFilter()) {
+      return true;
+    }
     long threshold = HiveConf.getLongVar(pctx.getConf(),
         HiveConf.ConfVars.HIVEFETCHTASKCONVERSIONTHRESHOLD);
     if (threshold < 0) {
@@ -169,7 +174,7 @@ public class SimpleFetchOptimizer implem
       PrunedPartitionList pruned = pctx.getPrunedPartitions(alias, ts);
       if (aggressive || !pruned.hasUnknownPartitions()) {
         bypassFilter &= !pruned.hasUnknownPartitions();
-        return checkOperators(new FetchData(parent, table, pruned, splitSample), ts,
+        return checkOperators(new FetchData(parent, table, pruned, splitSample, bypassFilter), ts,
             aggressive, bypassFilter);
       }
     }
@@ -211,6 +216,7 @@ public class SimpleFetchOptimizer implem
     private final SplitSample splitSample;
     private final PrunedPartitionList partsList;
     private final HashSet<ReadEntity> inputs = new HashSet<ReadEntity>();
+    private final boolean onlyPruningFilter;
 
     // source table scan
     private TableScanOperator scanOp;
@@ -223,14 +229,23 @@ public class SimpleFetchOptimizer implem
       this.table = table;
       this.partsList = null;
       this.splitSample = splitSample;
+      this.onlyPruningFilter = false;
     }
 
     private FetchData(ReadEntity parent, Table table, PrunedPartitionList partsList,
-        SplitSample splitSample) {
+        SplitSample splitSample, boolean bypassFilter) {
       this.parent = parent;
      this.table = table;
      this.partsList = partsList;
      this.splitSample = splitSample;
+      this.onlyPruningFilter = bypassFilter;
+    }
+
+    /*
+     * All filters were executed during partition pruning.
+     */
+    public boolean hasOnlyPruningFilter() {
+      return this.onlyPruningFilter;
    }
 
    private FetchWork convertToWork() throws HiveException {
@@ -317,7 +332,12 @@ public class SimpleFetchOptimizer implem
         InputFormat input = HiveInputFormat.getInputFormatFromCache(clazz, conf);
         summary = ((ContentSummaryInputFormat)input).getContentSummary(path, conf);
       } else {
-        summary = path.getFileSystem(conf).getContentSummary(path);
+        FileSystem fs = path.getFileSystem(conf);
+        try {
+          summary = fs.getContentSummary(path);
+        } catch (FileNotFoundException e) {
+          return 0;
+        }
       }
       return summary.getLength();
     }
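Note on the SimpleFetchOptimizer changes: when the query carries an outer LIMIT and the residual filter was fully consumed by partition pruning, at most `limit` rows will ever be read, so the input-size threshold no longer gates fetch conversion (e.g. SELECT * FROM t WHERE dt = '2014-07-25' LIMIT 10). A condensed restatement of the new decision, not the exact source (a negative threshold is read here as "size check disabled"):

    // Condensed from the diff above; names follow the patch.
    static boolean convertToFetch(boolean onlyPruningFilter, int limit,
        long inputSize, long threshold) {
      if (limit > 0 && onlyPruningFilter) {
        return true;  // bounded output and no residual filter: always convert
      }
      return threshold < 0 || inputSize <= threshold;  // otherwise size-gated
    }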
Modified: hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java
URL: http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java (original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/StatsOptimizer.java Fri Jul 25 00:38:23 2014
@@ -72,6 +72,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.apache.thrift.TException;
 
 import com.google.common.collect.Lists;
@@ -229,7 +230,7 @@ public class StatsOptimizer implements T
           // our stats for NDV is approx, not accurate.
           return null;
         }
-        if (aggr.getGenericUDAFName().equals(GenericUDAFSum.class.getAnnotation(
+        if (aggr.getGenericUDAFName().equals(AnnotationUtils.getAnnotation(GenericUDAFSum.class,
            Description.class).name())) {
          if(!(aggr.getParameters().get(0) instanceof ExprNodeConstantDesc)){
            return null;
@@ -243,7 +244,7 @@ public class StatsOptimizer implements T
            ois.add(PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveCategory.DECIMAL));
          }
-          else if (aggr.getGenericUDAFName().equals(GenericUDAFCount.class.getAnnotation(
+          else if (aggr.getGenericUDAFName().equals(AnnotationUtils.getAnnotation(GenericUDAFCount.class,
              Description.class).name())) {
            Long rowCnt = 0L;
            if ((aggr.getParameters().isEmpty() || aggr.getParameters().get(0) instanceof
