Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/MetadataOnlyOptimizer.java
 Fri Jul 25 00:38:23 2014
@@ -17,11 +17,8 @@
  */
 package org.apache.hadoop.hive.ql.optimizer.physical;
 
-import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
@@ -29,33 +26,19 @@ import java.util.Stack;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.GroupByOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
-import org.apache.hadoop.hive.ql.io.OneNullRowInputFormat;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
-import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.GraphWalker;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessor;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
-import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.MapWork;
-import org.apache.hadoop.hive.ql.plan.MapredWork;
-import org.apache.hadoop.hive.ql.plan.OperatorDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
-import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.NullStructSerDe;
 
 /**
  *
@@ -72,9 +55,9 @@ import org.apache.hadoop.hive.serde2.Nul
  *
  */
 public class MetadataOnlyOptimizer implements PhysicalPlanResolver {
-  private static final Log LOG = 
LogFactory.getLog(MetadataOnlyOptimizer.class.getName());
+  static final Log LOG = 
LogFactory.getLog(MetadataOnlyOptimizer.class.getName());
 
-  static private class WalkerCtx implements NodeProcessorCtx {
+  static class WalkerCtx implements NodeProcessorCtx {
     /* operators for which there is chance the optimization can be applied */
     private final HashSet<TableScanOperator> possible = new 
HashSet<TableScanOperator>();
     /* operators for which the optimization will be successful */
@@ -129,7 +112,6 @@ public class MetadataOnlyOptimizer imple
     @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
         Object... nodeOutputs) throws SemanticException {
-      TableScanOperator node = (TableScanOperator) nd;
       TableScanOperator tsOp = (TableScanOperator) nd;
       WalkerCtx walkerCtx = (WalkerCtx) procCtx;
       List<Integer> colIDs = tsOp.getNeededColumnIDs();
@@ -174,151 +156,17 @@ public class MetadataOnlyOptimizer imple
 
   @Override
   public PhysicalContext resolve(PhysicalContext pctx) throws 
SemanticException {
-    Dispatcher disp = new MetadataOnlyTaskDispatcher(pctx);
+    Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, 
NodeProcessor>();
+    opRules.put(new RuleRegExp("R1", TableScanOperator.getOperatorName() + 
"%"),
+      new TableScanProcessor());
+    opRules.put(new RuleRegExp("R2",
+      GroupByOperator.getOperatorName() + "%.*" + 
FileSinkOperator.getOperatorName() + "%"),
+      new FileSinkProcessor());
+    Dispatcher disp = new NullScanTaskDispatcher(pctx, opRules);
     GraphWalker ogw = new DefaultGraphWalker(disp);
     ArrayList<Node> topNodes = new ArrayList<Node>();
     topNodes.addAll(pctx.getRootTasks());
     ogw.startWalking(topNodes, null);
     return pctx;
   }
-
-  /**
-   * Iterate over all tasks one-to-one and convert them to metadata only
-   */
-  class MetadataOnlyTaskDispatcher implements Dispatcher {
-
-    private final PhysicalContext physicalContext;
-
-    public MetadataOnlyTaskDispatcher(PhysicalContext context) {
-      super();
-      physicalContext = context;
-    }
-
-    private String getAliasForTableScanOperator(MapWork work,
-        TableScanOperator tso) {
-
-      for (Map.Entry<String, Operator<? extends OperatorDesc>> entry :
-        work.getAliasToWork().entrySet()) {
-        if (entry.getValue() == tso) {
-          return entry.getKey();
-        }
-      }
-
-      return null;
-    }
-
-    private PartitionDesc changePartitionToMetadataOnly(PartitionDesc desc) {
-      if (desc != null) {
-        desc.setInputFileFormatClass(OneNullRowInputFormat.class);
-        desc.setOutputFileFormatClass(HiveIgnoreKeyTextOutputFormat.class);
-        desc.getProperties().setProperty(serdeConstants.SERIALIZATION_LIB,
-          NullStructSerDe.class.getName());
-      }
-      return desc;
-    }
-
-    private List<String> getPathsForAlias(MapWork work, String alias) {
-      List<String> paths = new ArrayList<String>();
-
-      for (Map.Entry<String, ArrayList<String>> entry : 
work.getPathToAliases().entrySet()) {
-        if (entry.getValue().contains(alias)) {
-          paths.add(entry.getKey());
-        }
-      }
-
-      return paths;
-    }
-
-    private void processAlias(MapWork work, String alias) {
-      work.setUseOneNullRowInputFormat(true);
-
-      // Change the alias partition desc
-      PartitionDesc aliasPartn = work.getAliasToPartnInfo().get(alias);
-      changePartitionToMetadataOnly(aliasPartn);
-
-      List<String> paths = getPathsForAlias(work, alias);
-      for (String path : paths) {
-        PartitionDesc partDesc = work.getPathToPartitionInfo().get(path);
-        PartitionDesc newPartition = changePartitionToMetadataOnly(partDesc);
-        Path fakePath = new Path(physicalContext.getContext().getMRTmpPath()
-            + newPartition.getTableName()
-            + encode(newPartition.getPartSpec()));
-        work.getPathToPartitionInfo().remove(path);
-        work.getPathToPartitionInfo().put(fakePath.getName(), newPartition);
-        ArrayList<String> aliases = work.getPathToAliases().remove(path);
-        work.getPathToAliases().put(fakePath.getName(), aliases);
-      }
-    }
-
-    // considered using URLEncoder, but it seemed too much
-    private String encode(Map<String, String> partSpec) {
-      return partSpec.toString().replaceAll("[:/#\\?]", "_");
-    }
-
-    @Override
-    public Object dispatch(Node nd, Stack<Node> stack, Object... nodeOutputs)
-        throws SemanticException {
-      Task<? extends Serializable> task = (Task<? extends Serializable>) nd;
-
-      // create a the context for walking operators
-      ParseContext parseContext = physicalContext.getParseContext();
-      WalkerCtx walkerCtx = new WalkerCtx();
-
-      Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, 
NodeProcessor>();
-      opRules.put(new RuleRegExp("R1",
-        TableScanOperator.getOperatorName() + "%"),
-        new TableScanProcessor());
-      opRules.put(new RuleRegExp("R2",
-        GroupByOperator.getOperatorName() + "%.*" + 
FileSinkOperator.getOperatorName() + "%"),
-        new FileSinkProcessor());
-
-      for (MapWork mapWork: task.getMapWork()) {
-        LOG.debug("Looking at: "+mapWork.getName());
-        Collection<Operator<? extends OperatorDesc>> topOperators
-          = mapWork.getAliasToWork().values();
-        if (topOperators.size() == 0) {
-          LOG.debug("No top operators");
-          return null;
-        }
-
-        LOG.info("Looking for table scans where optimization is applicable");
-
-        // The dispatcher fires the processor corresponding to the closest
-        // matching rule and passes the context along
-        Dispatcher disp = new DefaultRuleDispatcher(null, opRules, walkerCtx);
-        GraphWalker ogw = new PreOrderWalker(disp);
-
-        // Create a list of topOp nodes
-        ArrayList<Node> topNodes = new ArrayList<Node>();
-        // Get the top Nodes for this map-reduce task
-        for (Operator<? extends OperatorDesc>
-               workOperator : topOperators) {
-          if (parseContext.getTopOps().values().contains(workOperator)) {
-            topNodes.add(workOperator);
-          }
-        }
-
-        Operator<? extends OperatorDesc> reducer = task.getReducer(mapWork);
-        if (reducer != null) {
-          topNodes.add(reducer);
-        }
-
-        ogw.startWalking(topNodes, null);
-
-        LOG.info(String.format("Found %d metadata only table scans",
-            walkerCtx.getMetadataOnlyTableScans().size()));
-        Iterator<TableScanOperator> iterator
-          = walkerCtx.getMetadataOnlyTableScans().iterator();
-
-        while (iterator.hasNext()) {
-          TableScanOperator tso = iterator.next();
-          ((TableScanDesc)tso.getConf()).setIsMetadataOnly(true);
-          String alias = getAliasForTableScanOperator(mapWork, tso);
-          LOG.info("Metadata only table scan for " + alias);
-          processAlias(mapWork, alias);
-        }
-      }
-      return null;
-    }
-  }
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/PhysicalOptimizer.java
 Fri Jul 25 00:38:23 2014
@@ -67,6 +67,9 @@ public class PhysicalOptimizer {
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES)) {
       resolvers.add(new MetadataOnlyOptimizer());
     }
+    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVENULLSCANOPTIMIZE)) {
+      resolvers.add(new NullScanOptimizer());
+    }
     if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVESAMPLINGFORORDERBY)) {
       resolvers.add(new SamplingOptimizer());
     }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/Vectorizer.java
 Fri Jul 25 00:38:23 2014
@@ -222,6 +222,7 @@ public class Vectorizer implements Physi
     supportedGenericUDFs.add(GenericUDFCase.class);
     supportedGenericUDFs.add(GenericUDFWhen.class);
     supportedGenericUDFs.add(GenericUDFCoalesce.class);
+    supportedGenericUDFs.add(GenericUDFElt.class);
 
     // For type casts
     supportedGenericUDFs.add(UDFToLong.class);

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java
 Fri Jul 25 00:38:23 2014
@@ -288,6 +288,7 @@ public class StatsRulesProcFactory {
         AnnotateStatsProcCtx aspCtx, List<String> neededCols) throws 
CloneNotSupportedException {
       long newNumRows = 0;
       Statistics andStats = null;
+
       if (pred instanceof ExprNodeGenericFuncDesc) {
         ExprNodeGenericFuncDesc genFunc = (ExprNodeGenericFuncDesc) pred;
         GenericUDF udf = genFunc.getGenericUDF();
@@ -334,6 +335,15 @@ public class StatsRulesProcFactory {
 
         // if not boolean column return half the number of rows
         return stats.getNumRows() / 2;
+      } else if (pred instanceof ExprNodeConstantDesc) {
+
+        // special case for handling false constants
+        ExprNodeConstantDesc encd = (ExprNodeConstantDesc) pred;
+        if (encd.getValue().equals(false)) {
+          return 0;
+        } else {
+          return stats.getNumRows();
+        }
       }
 
       return newNumRows;
@@ -429,13 +439,27 @@ public class StatsRulesProcFactory {
           String colName = null;
           String tabAlias = null;
           boolean isConst = false;
+          Object prevConst = null;
 
           for (ExprNodeDesc leaf : genFunc.getChildren()) {
             if (leaf instanceof ExprNodeConstantDesc) {
 
+              // constant = constant expressions. We shouldn't be getting this
+              // after constant folding
+              if (isConst) {
+
+                // special case: if both constants are not equal then return 0
+                if (prevConst != null &&
+                    
!prevConst.equals(((ExprNodeConstantDesc)leaf).getValue())) {
+                  return 0;
+                }
+                return numRows;
+              }
+
               // if the first argument is const then just set the flag and 
continue
               if (colName == null) {
                 isConst = true;
+                prevConst = ((ExprNodeConstantDesc) leaf).getValue();
                 continue;
               }
 

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
 Fri Jul 25 00:38:23 2014
@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
@@ -32,14 +31,13 @@ import org.apache.hadoop.hive.ql.ErrorMs
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * FunctionSemanticAnalyzer.
@@ -78,7 +76,7 @@ public class FunctionSemanticAnalyzer ex
 
     // find any referenced resources
     List<ResourceUri> resources = getResourceList(ast);
-    
+
     CreateFunctionDesc desc =
         new CreateFunctionDesc(functionName, isTemporaryFunction, className, 
resources);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
@@ -152,15 +150,22 @@ public class FunctionSemanticAnalyzer ex
   }
 
   /**
-   * Add write entities to the semantic analyzer to restrict function creation 
to priviliged users.
+   * Add write entities to the semantic analyzer to restrict function creation 
to privileged users.
    */
   private void addEntities(String functionName, boolean isTemporaryFunction)
       throws SemanticException {
+    // If the function is being added under a database 'namespace', then add 
an entity representing
+    // the database (only applicable to permanent/metastore functions).
+    // We also add a second entity representing the function name.
+    // The authorization api implementation can decide which entities it wants 
to use to
+    // authorize the create/drop function call.
+
+    // Add the relevant database 'namespace' as a WriteEntity
     Database database = null;
-    if (isTemporaryFunction) {
-      // This means temp function creation is also restricted.
-      database = getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
-    } else {
+
+    // temporary functions don't have any database 'namespace' associated with
+    // them; it matters only for permanent functions
+    if (!isTemporaryFunction) {
       try {
         String[] qualifiedNameParts = 
FunctionUtils.getQualifiedFunctionNameParts(functionName);
         String dbName = qualifiedNameParts[0];
@@ -173,5 +178,9 @@ public class FunctionSemanticAnalyzer ex
     if (database != null) {
       outputs.add(new WriteEntity(database, 
WriteEntity.WriteType.DDL_NO_LOCK));
     }
+
+    // Add the function name as a WriteEntity
+    outputs.add(new WriteEntity(database, functionName, Type.FUNCTION,
+        WriteEntity.WriteType.DDL_NO_LOCK));
   }
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
(original)
+++ hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
Fri Jul 25 00:38:23 2014
@@ -264,6 +264,7 @@ TOK_RESOURCE_ALL;
 TOK_GRANT_WITH_OPTION;
 TOK_GRANT_WITH_ADMIN_OPTION;
 TOK_ADMIN_OPTION_FOR;
+TOK_GRANT_OPTION_FOR;
 TOK_PRIV_ALL;
 TOK_PRIV_ALTER_METADATA;
 TOK_PRIV_ALTER_DATA;
@@ -1388,8 +1389,8 @@ grantPrivileges
 revokePrivileges
 @init {pushMsg("revoke privileges", state);}
 @afer {popMsg(state);}
-    : KW_REVOKE privilegeList privilegeObject? KW_FROM principalSpecification
-    -> ^(TOK_REVOKE privilegeList principalSpecification privilegeObject?)
+    : KW_REVOKE grantOptionFor? privilegeList privilegeObject? KW_FROM 
principalSpecification
+    -> ^(TOK_REVOKE privilegeList principalSpecification privilegeObject? 
grantOptionFor?)
     ;
 
 grantRole
@@ -1526,6 +1527,13 @@ withGrantOption
     -> ^(TOK_GRANT_WITH_OPTION)
     ;
 
+grantOptionFor
+@init {pushMsg("grant option for", state);}
+@after {popMsg(state);}
+    : KW_GRANT KW_OPTION KW_FOR
+    -> ^(TOK_GRANT_OPTION_FOR)
+;
+
 adminOptionFor
 @init {pushMsg("admin option for", state);}
 @after {popMsg(state);}

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java 
(original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/TezCompiler.java 
Fri Jul 25 00:38:23 2014
@@ -61,6 +61,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.SetReducerParallelism;
 import org.apache.hadoop.hive.ql.optimizer.physical.CrossProductCheck;
 import org.apache.hadoop.hive.ql.optimizer.physical.MetadataOnlyOptimizer;
+import org.apache.hadoop.hive.ql.optimizer.physical.NullScanOptimizer;
 import org.apache.hadoop.hive.ql.optimizer.physical.PhysicalContext;
 import org.apache.hadoop.hive.ql.optimizer.physical.Vectorizer;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
@@ -248,6 +249,12 @@ public class TezCompiler extends TaskCom
     PhysicalContext physicalCtx = new PhysicalContext(conf, pCtx, 
pCtx.getContext(), rootTasks,
        pCtx.getFetchTask());
 
+    if (conf.getBoolVar(HiveConf.ConfVars.HIVENULLSCANOPTIMIZE)) {
+      physicalCtx = new NullScanOptimizer().resolve(physicalCtx);
+    } else {
+      LOG.debug("Skipping null scan query optimization");
+    }
+
     if (conf.getBoolVar(HiveConf.ConfVars.HIVEMETADATAONLYQUERIES)) {
       physicalCtx = new MetadataOnlyOptimizer().resolve(physicalCtx);
     } else {

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/parse/authorization/HiveAuthorizationTaskFactoryImpl.java
 Fri Jul 25 00:38:23 2014
@@ -138,11 +138,16 @@ public class HiveAuthorizationTaskFactor
     List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) 
ast.getChild(0));
     List<PrincipalDesc> principalDesc = 
AuthorizationParseUtils.analyzePrincipalListDef((ASTNode) ast.getChild(1));
     PrivilegeObjectDesc hiveObj = null;
+    boolean grantOption = false;
     if (ast.getChildCount() > 2) {
       ASTNode astChild = (ASTNode) ast.getChild(2);
       hiveObj = analyzePrivilegeObject(astChild, outputs);
+
+      if (null != ast.getFirstChildWithType(HiveParser.TOK_GRANT_OPTION_FOR)) {
+        grantOption = true;
+      }
     }
-    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, 
hiveObj);
+    RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, 
hiveObj, grantOption);
     return TaskFactory.get(new DDLWork(inputs, outputs, revokeDesc), conf);
   }
   @Override

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java 
(original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java 
Fri Jul 25 00:38:23 2014
@@ -23,6 +23,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hive.common.util.AnnotationUtils;
 
 /**
  * GroupByDesc.
@@ -228,7 +229,7 @@ public class GroupByDesc extends Abstrac
     for (AggregationDesc ad : aggregators) {
       if (!ad.getDistinct()) {
         GenericUDAFEvaluator udafEval = ad.getGenericUDAFEvaluator();
-        UDFType annot = udafEval.getClass().getAnnotation(UDFType.class);
+        UDFType annot = AnnotationUtils.getAnnotation(udafEval.getClass(), 
UDFType.class);
         if (annot == null || !annot.distinctLike()) {
           return false;
         }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java 
(original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/RevokeDesc.java 
Fri Jul 25 00:38:23 2014
@@ -31,16 +31,24 @@ public class RevokeDesc extends DDLDesc 
   private List<PrincipalDesc> principals;
 
   private PrivilegeObjectDesc privilegeSubjectDesc;
-  
+
+  private boolean grantOption;
+
   public RevokeDesc(){
   }
 
   public RevokeDesc(List<PrivilegeDesc> privileges,
       List<PrincipalDesc> principals, PrivilegeObjectDesc 
privilegeSubjectDesc) {
+    this(privileges, principals, privilegeSubjectDesc, false);
+  }
+
+  public RevokeDesc(List<PrivilegeDesc> privileges,
+        List<PrincipalDesc> principals, PrivilegeObjectDesc 
privilegeSubjectDesc, boolean grantOption) {
     super();
     this.privileges = privileges;
     this.principals = principals;
     this.privilegeSubjectDesc = privilegeSubjectDesc;
+    this.grantOption = grantOption;
   }
 
   public List<PrivilegeDesc> getPrivileges() {
@@ -66,5 +74,13 @@ public class RevokeDesc extends DDLDesc 
   public void setPrivilegeSubjectDesc(PrivilegeObjectDesc 
privilegeSubjectDesc) {
     this.privilegeSubjectDesc = privilegeSubjectDesc;
   }
+
+  public boolean isGrantOption() {
+    return grantOption;
+  }
+
+  public void setGrantOption(boolean grantOption) {
+    this.grantOption = grantOption;
+  }
   
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/plan/TezEdgeProperty.java
 Fri Jul 25 00:38:23 2014
@@ -1,3 +1,21 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.hadoop.hive.ql.plan;
 
 import org.apache.hadoop.hive.conf.HiveConf;

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/AddResourceProcessor.java
 Fri Jul 25 00:38:23 2014
@@ -24,6 +24,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -37,9 +38,11 @@ public class AddResourceProcessor implem
       .getName());
   public static final LogHelper console = new LogHelper(LOG);
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     command = new VariableSubstitution().substitute(ss.getConf(),command);
@@ -52,11 +55,19 @@ public class AddResourceProcessor implem
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.ADD, 
Arrays.asList(tokens));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
     try {
       ss.add_resources(t,
           Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
     } catch (Exception e) {
-      return new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     return new CommandProcessorResponse(0);
   }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CommandProcessorResponse.java
 Fri Jul 25 00:38:23 2014
@@ -27,12 +27,12 @@ import org.apache.hadoop.hive.metastore.
  * is not 0.
  */
 public class CommandProcessorResponse {
-  private int responseCode;
-  private String errorMessage;
-  private String SQLState;
-  private Schema resSchema;
+  private final int responseCode;
+  private final String errorMessage;
+  private final String SQLState;
+  private final Schema resSchema;
 
-  private Throwable exception;
+  private final Throwable exception;
 
   public CommandProcessorResponse(int responseCode) {
     this(responseCode, null, null, null, null);
@@ -50,6 +50,18 @@ public class CommandProcessorResponse {
     this(responseCode, errorMessage, SQLState, schema, null);
   }
 
+  /**
+   * Create a CommandProcessorResponse object indicating an error.
+   * Creates a new CommandProcessorResponse with responseCode=1, and sets the
+   * message from the exception argument.
+   *
+   * @param e the exception that caused the error
+   * @return a CommandProcessorResponse representing the error
+   */
+  public static CommandProcessorResponse create(Exception e) {
+    return new CommandProcessorResponse(1, e.getMessage(), null);
+  }
+
   public CommandProcessorResponse(int responseCode, String errorMessage, 
String SQLState,
       Schema schema, Throwable exception) {
     this.responseCode = responseCode;

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/CompileProcessor.java
 Fri Jul 25 00:38:23 2014
@@ -23,6 +23,7 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.charset.Charset;
+import java.util.Arrays;
 import java.util.StringTokenizer;
 import java.util.concurrent.atomic.AtomicInteger;
 
@@ -34,6 +35,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
@@ -107,18 +109,27 @@ public class CompileProcessor implements
   @Override
   public CommandProcessorResponse run(String command) throws 
CommandNeedRetryException {
     SessionState ss = SessionState.get();
-    myId = runCount.getAndIncrement();
     this.command = command;
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.COMPILE, 
Arrays.asList(command));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
+    myId = runCount.getAndIncrement();
+
     try {
       parse(ss);
     } catch (CompileProcessorException e) {
-      return new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     CommandProcessorResponse result = null;
     try {
       result = compile(ss);
     } catch (CompileProcessorException e) {
-      result = new CommandProcessorResponse(1, e.getMessage(), null);
+      return CommandProcessorResponse.create(e);
     }
     return result;
   }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DeleteResourceProcessor.java
 Fri Jul 25 00:38:23 2014
@@ -24,6 +24,7 @@ import org.apache.commons.lang.StringUti
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -36,9 +37,11 @@ public class DeleteResourceProcessor imp
   public static final Log LOG = 
LogFactory.getLog(DeleteResourceProcessor.class.getName());
   public static final LogHelper console = new LogHelper(LOG);
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
     SessionState ss = SessionState.get();
     command = new VariableSubstitution().substitute(ss.getConf(),command);
@@ -52,7 +55,12 @@ public class DeleteResourceProcessor imp
           + "] <value> [<value>]*");
       return new CommandProcessorResponse(1);
     }
-
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.DELETE, 
Arrays.asList(tokens));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
     if (tokens.length >= 2) {
       ss.delete_resources(t, Arrays.asList(Arrays.copyOfRange(tokens, 1, 
tokens.length)));
     } else {

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/DfsProcessor.java
 Fri Jul 25 00:38:23 2014
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.processors;
 
 import java.io.PrintStream;
+import java.util.Arrays;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@ import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
@@ -53,9 +55,11 @@ public class DfsProcessor implements Com
     dfsSchema.addToFieldSchemas(new FieldSchema(DFS_RESULT_HEADER, "string", 
""));
   }
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) {
 
 
@@ -64,6 +68,13 @@ public class DfsProcessor implements Com
       command = new VariableSubstitution().substitute(ss.getConf(),command);
 
       String[] tokens = command.split("\\s+");
+      CommandProcessorResponse authErrResp =
+          CommandUtil.authorizeCommand(ss, HiveOperationType.DFS, 
Arrays.asList(tokens));
+      if(authErrResp != null){
+        // there was an authorization issue
+        return authErrResp;
+      }
+
       PrintStream oldOut = System.out;
 
       if (ss != null && ss.out != null) {

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/processors/ResetProcessor.java
 Fri Jul 25 00:38:23 2014
@@ -18,17 +18,30 @@
 
 package org.apache.hadoop.hive.ql.processors;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
 public class ResetProcessor implements CommandProcessor {
 
+  @Override
   public void init() {
   }
 
+  @Override
   public CommandProcessorResponse run(String command) throws 
CommandNeedRetryException {
     SessionState ss = SessionState.get();
+
+    CommandProcessorResponse authErrResp =
+        CommandUtil.authorizeCommand(ss, HiveOperationType.RESET, 
Arrays.asList(command));
+    if(authErrResp != null){
+      // there was an authorization issue
+      return authErrResp;
+    }
+
     if (ss.getOverriddenConfigurations().isEmpty()) {
       return new CommandProcessorResponse(0);
     }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
 Fri Jul 25 00:38:23 2014
@@ -18,22 +18,32 @@
 package org.apache.hadoop.hive.ql.security.authorization;
 
 import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.hooks.Entity;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.plan.PrincipalDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeDesc;
+import org.apache.hadoop.hive.ql.plan.PrivilegeObjectDesc;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Utility code shared by hive internal code and sql standard authorization 
plugin implementation
@@ -48,13 +58,19 @@ public class AuthorizationUtils {
    * @throws HiveException
    */
   public static HivePrincipalType getHivePrincipalType(PrincipalType type) 
throws HiveException {
+    if (type == null) {
+      return null;
+    }
     switch(type){
     case USER:
       return HivePrincipalType.USER;
     case ROLE:
       return HivePrincipalType.ROLE;
     case GROUP:
-      throw new 
HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      if (SessionState.get().getAuthorizationMode() == 
SessionState.AuthorizationMode.V2) {
+        throw new 
HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
+      }
+      return HivePrincipalType.GROUP;
     default:
       //should not happen as we take care of all existing types
       throw new AssertionError("Unsupported authorization type specified");
@@ -68,6 +84,9 @@ public class AuthorizationUtils {
    * @return
    */
   public static HivePrivilegeObjectType getHivePrivilegeObjectType(Type type) {
+    if (type == null){
+      return null;
+    }
     switch(type){
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
@@ -80,11 +99,102 @@ public class AuthorizationUtils {
     case PARTITION:
     case DUMMYPARTITION: //need to determine if a different type is needed for 
dummy partitions
       return HivePrivilegeObjectType.PARTITION;
+    case FUNCTION:
+      return HivePrivilegeObjectType.FUNCTION;
     default:
       return null;
     }
   }
 
+  public static HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc 
privSubjectDesc) {
+    if (privSubjectDesc.getObject() == null) {
+      return null;
+    }
+    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW :
+        HivePrivilegeObjectType.DATABASE;
+  }
+
+  public static List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> 
privileges) {
+    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
+    for(PrivilegeDesc privilege : privileges){
+      Privilege priv = privilege.getPrivilege();
+      hivePrivileges.add(
+          new HivePrivilege(priv.toString(), privilege.getColumns(), 
priv.getScopeList()));
+    }
+    return hivePrivileges;
+  }
+
+  public static List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> 
principals)
+      throws HiveException {
+
+    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
+    for(PrincipalDesc principal : principals){
+      hivePrincipals.add(getHivePrincipal(principal));
+    }
+    return hivePrincipals;
+  }
+
+  public static HivePrincipal getHivePrincipal(PrincipalDesc principal) throws 
HiveException {
+    if (principal == null) {
+      return null;
+    }
+    return getHivePrincipal(principal.getName(), principal.getType());
+  }
+
+  public static HivePrincipal getHivePrincipal(String name, PrincipalType 
type) throws HiveException {
+    return new HivePrincipal(name, 
AuthorizationUtils.getHivePrincipalType(type));
+  }
+
+  public static List<HivePrivilegeInfo> 
getPrivilegeInfos(List<HiveObjectPrivilege> privs)
+      throws HiveException {
+    List<HivePrivilegeInfo> hivePrivs = new ArrayList<HivePrivilegeInfo>();
+    for (HiveObjectPrivilege priv : privs) {
+      PrivilegeGrantInfo grantorInfo = priv.getGrantInfo();
+      HiveObjectRef privObject = priv.getHiveObject();
+      HivePrincipal hivePrincipal =
+          getHivePrincipal(priv.getPrincipalName(), priv.getPrincipalType());
+      HivePrincipal grantor =
+          getHivePrincipal(grantorInfo.getGrantor(), 
grantorInfo.getGrantorType());
+      HivePrivilegeObject object = getHiveObjectRef(privObject);
+      HivePrivilege privilege = new HivePrivilege(grantorInfo.getPrivilege(), 
null);
+      hivePrivs.add(new HivePrivilegeInfo(hivePrincipal, privilege, object, 
grantor,
+          grantorInfo.isGrantOption(), grantorInfo.getCreateTime()));
+    }
+    return hivePrivs;
+  }
+
+  public static HivePrivilegeObject getHiveObjectRef(HiveObjectRef privObj) 
throws HiveException {
+    if (privObj == null) {
+      return null;
+    }
+    HivePrivilegeObjectType objType = getHiveObjType(privObj.getObjectType());
+    return new HivePrivilegeObject(objType, privObj.getDbName(), 
privObj.getObjectName(),
+        privObj.getPartValues(), privObj.getColumnName());
+  }
+
+  public static HivePrivilegeObject getHivePrivilegeObject(
+      PrivilegeObjectDesc privSubjectDesc, List<String> columns) throws 
HiveException {
+
+    // null means ALL for show grants, GLOBAL for grant/revoke
+    HivePrivilegeObjectType objectType = null;
+
+    String[] dbTable;
+    List<String> partSpec = null;
+    if (privSubjectDesc == null) {
+      dbTable = new String[] {null, null};
+    } else {
+      if (privSubjectDesc.getTable()) {
+        dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
+      } else {
+        dbTable = new String[] {privSubjectDesc.getObject(), null};
+      }
+      if (privSubjectDesc.getPartSpec() != null) {
+        partSpec = new 
ArrayList<String>(privSubjectDesc.getPartSpec().values());
+      }
+      objectType = getPrivObjectType(privSubjectDesc);
+    }
+    return new HivePrivilegeObject(objectType, dbTable[0], dbTable[1], 
partSpec, columns, null);
+  }
 
   /**
    * Convert authorization plugin principal type to thrift principal type
@@ -99,6 +209,8 @@ public class AuthorizationUtils {
     switch(type){
     case USER:
       return PrincipalType.USER;
+    case GROUP:
+      return PrincipalType.GROUP;
     case ROLE:
       return PrincipalType.ROLE;
     default:
@@ -106,7 +218,6 @@ public class AuthorizationUtils {
     }
   }
 
-
   /**
    * Get thrift privilege grant info
    * @param privilege
@@ -134,21 +245,48 @@ public class AuthorizationUtils {
       return null;
     }
     switch(type){
+    case GLOBAL:
+      return HiveObjectType.GLOBAL;
     case DATABASE:
       return HiveObjectType.DATABASE;
     case TABLE_OR_VIEW:
       return HiveObjectType.TABLE;
     case PARTITION:
       return HiveObjectType.PARTITION;
-    case LOCAL_URI:
-    case DFS_URI:
-      throw new HiveException("Unsupported type " + type);
+    case COLUMN:
+      return HiveObjectType.COLUMN;
     default:
-      //should not happen as we have accounted for all types
-      throw new AssertionError("Unsupported type " + type);
+      throw new HiveException("Unsupported type " + type);
     }
   }
 
+  // V1 to V2 conversion.
+  private static HivePrivilegeObjectType getHiveObjType(HiveObjectType type) 
throws HiveException {
+    if (type == null) {
+      return null;
+    }
+    switch(type){
+      case GLOBAL:
+        if (SessionState.get().getAuthorizationMode() == 
SessionState.AuthorizationMode.V2) {
+          throw new 
HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL);
+        }
+        return HivePrivilegeObjectType.GLOBAL;
+      case DATABASE:
+        return HivePrivilegeObjectType.DATABASE;
+      case TABLE:
+        return HivePrivilegeObjectType.TABLE_OR_VIEW;
+      case PARTITION:
+        return HivePrivilegeObjectType.PARTITION;
+      case COLUMN:
+        if (SessionState.get().getAuthorizationMode() == 
SessionState.AuthorizationMode.V2) {
+          throw new 
HiveException(ErrorMsg.UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN);
+        }
+        return HivePrivilegeObjectType.COLUMN;
+      default:
+        //should not happen as we have accounted for all types
+        throw new AssertionError("Unsupported type " + type);
+    }
+  }
 
   /**
    * Convert thrift HiveObjectRef to plugin HivePrivilegeObject
@@ -161,7 +299,7 @@ public class AuthorizationUtils {
       return null;
     }
     HiveObjectType objType = getThriftHiveObjType(privObj.getType());
-    return new HiveObjectRef(objType, privObj.getDbname(), 
privObj.getTableViewURI(), null, null);
+    return new HiveObjectRef(objType, privObj.getDbname(), 
privObj.getObjectName(), null, null);
   }
 
   public static HivePrivObjectActionType getActionType(Entity privObject) {

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
 Fri Jul 25 00:38:23 2014
@@ -18,7 +18,9 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
+import java.util.ArrayList;
 import java.util.EnumSet;
+import java.util.List;
 
 /**
  * Privilege defines a privilege in Hive. Each privilege has a name and scope 
associated with it.
@@ -65,6 +67,17 @@ public class Privilege {
         && supportedScopeSet.contains(PrivilegeScope.TABLE_LEVEL_SCOPE);
   }
 
+  public List<String> getScopeList() {
+    if (supportedScopeSet == null) {
+      return null;
+    }
+    List<String> scopes = new ArrayList<String>();
+    for (PrivilegeScope scope : supportedScopeSet) {
+      scopes.add(scope.name());
+    }
+    return scopes;
+  }
+
   @Override
   public String toString() {
     return this.getPriv().toString();

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java
 Fri Jul 25 00:38:23 2014
@@ -37,10 +37,11 @@ public interface HiveAuthorizationValida
    * @param hiveOpType
    * @param inputHObjs
    * @param outputHObjs
+   * @param context
    * @throws HiveAuthzPluginException
    * @throws HiveAccessControlException
    */
   void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> 
inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, 
HiveAccessControlException;
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws 
HiveAuthzPluginException, HiveAccessControlException;
 
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java
 Fri Jul 25 00:38:23 2014
@@ -146,11 +146,12 @@ public interface HiveAuthorizer {
    * @param hiveOpType
    * @param inputsHObjs
    * @param outputHObjs
+   * @param context
    * @throws HiveAuthzPluginException
    * @throws HiveAccessControlException
    */
   void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> 
inputsHObjs,
-      List<HivePrivilegeObject> outputHObjs)
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
       throws HiveAuthzPluginException, HiveAccessControlException;
 
   /**

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java
 Fri Jul 25 00:38:23 2014
@@ -80,8 +80,9 @@ public class HiveAuthorizerImpl implemen
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, 
List<HivePrivilegeObject> inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException, 
HiveAccessControlException {
-    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs);
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
+    authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, 
context);
   }
 
   @Override

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
 Fri Jul 25 00:38:23 2014
@@ -114,7 +114,12 @@ public enum HiveOperationType {
   ALTERVIEW_RENAME,
   ALTERTABLE_COMPACT,
   SHOW_COMPACTIONS,
-  SHOW_TRANSACTIONS
-
+  SHOW_TRANSACTIONS,
+  SET,
+  RESET,
+  DFS,
+  ADD,
+  DELETE,
+  COMPILE
 
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrincipal.java
 Fri Jul 25 00:38:23 2014
@@ -25,10 +25,19 @@ import org.apache.hadoop.hive.common.cla
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrincipal {
+public class HivePrincipal implements Comparable<HivePrincipal> {
+
+  @Override
+  public int compareTo(HivePrincipal o) {
+    int compare = name.compareTo(o.name);
+    if (compare == 0) {
+      compare = type.compareTo(o.type);
+    }
+    return compare;
+  }
 
   public enum HivePrincipalType{
-    USER, ROLE, UNKNOWN
+    USER, GROUP, ROLE, UNKNOWN
   }
 
   @Override

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilege.java
 Fri Jul 25 00:38:23 2014
@@ -22,13 +22,14 @@ import java.util.Locale;
 
 import 
org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import 
org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
+import org.apache.hadoop.hive.ql.security.authorization.PrivilegeScope;
 
 /**
  * Represents the hive privilege being granted/revoked
  */
 @LimitedPrivate(value = { "" })
 @Evolving
-public class HivePrivilege {
+public class HivePrivilege implements Comparable<HivePrivilege> {
   @Override
   public String toString() {
     return "Privilege [name=" + name + ", columns=" + columns + "]";
@@ -36,10 +37,16 @@ public class HivePrivilege {
 
   private final String name;
   private final List<String> columns;
+  private final List<String> supportedScope;
 
-  public HivePrivilege(String name, List<String> columns){
+  public HivePrivilege(String name, List<String> columns) {
+    this(name, columns, null);
+  }
+
+  public HivePrivilege(String name, List<String> columns, List<String> 
supportedScope) {
     this.name = name.toUpperCase(Locale.US);
     this.columns = columns;
+    this.supportedScope = supportedScope;
   }
 
   public String getName() {
@@ -50,6 +57,10 @@ public class HivePrivilege {
     return columns;
   }
 
+  public List<String> getSupportedScope() {
+    return supportedScope;
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;
@@ -82,5 +93,27 @@ public class HivePrivilege {
   }
 
 
+  public boolean supportsScope(PrivilegeScope scope) {
+    return supportedScope != null && supportedScope.contains(scope.name());
+  }
 
+  public int compareTo(HivePrivilege privilege) {
+    int compare = columns != null ?
+        (privilege.columns != null ? compare(columns, privilege.columns) : 1) :
+        (privilege.columns != null ? -1 : 0);
+    if (compare == 0) {
+      compare = name.compareTo(privilege.name);
+    }
+    return compare;
+  }
+
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
+  }
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
 Fri Jul 25 00:38:23 2014
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
 import 
org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import 
org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
 
@@ -25,51 +29,99 @@ import org.apache.hadoop.hive.common.cla
  */
 @LimitedPrivate(value = { "" })
 @Unstable
-public class HivePrivilegeObject {
+public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
 
   @Override
-  public String toString() {
-    String name = null;
-    switch (type) {
-    case DATABASE:
-      name = dbname;
-      break;
-    case TABLE_OR_VIEW:
-      name = (dbname == null ? "" : dbname + ".") + tableviewname;
-      break;
-    case LOCAL_URI:
-    case DFS_URI:
-      name = tableviewname;
-      break;
-    case PARTITION:
-      break;
+  public int compareTo(HivePrivilegeObject o) {
+    int compare = type.compareTo(o.type);
+    if (compare == 0) {
+      compare = dbname.compareTo(o.dbname);
+    }
+    if (compare == 0) {
+      compare = objectName != null ?
+          (o.objectName != null ? objectName.compareTo(o.objectName) : 1) :
+          (o.objectName != null ? -1 : 0);
+    }
+    if (compare == 0) {
+      compare = partKeys != null ?
+          (o.partKeys != null ? compare(partKeys, o.partKeys) : 1) :
+          (o.partKeys != null ? -1 : 0);
     }
-    return "Object [type=" + type + ", name=" + name + "]";
+    if (compare == 0) {
+      compare = columns != null ?
+          (o.columns != null ? compare(columns, o.columns) : 1) :
+          (o.columns != null ? -1 : 0);
+    }
+    return compare;
+  }
 
+  private int compare(List<String> o1, List<String> o2) {
+    for (int i = 0; i < Math.min(o1.size(), o2.size()); i++) {
+      int compare = o1.get(i).compareTo(o2.get(i));
+      if (compare != 0) {
+        return compare;
+      }
+    }
+    return o1.size() > o2.size() ? 1 : (o1.size() < o2.size() ? -1 : 0);
   }
 
   public enum HivePrivilegeObjectType {
-    DATABASE, TABLE_OR_VIEW, PARTITION, LOCAL_URI, DFS_URI
-  };
-
+    GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, 
COMMAND_PARAMS, FUNCTION
+  } ;
   public enum HivePrivObjectActionType {
     OTHER, INSERT, INSERT_OVERWRITE
   };
+
   private final HivePrivilegeObjectType type;
   private final String dbname;
-  private final String tableviewname;
+  private final String objectName;
+  private final List<String> commandParams;
+  private final List<String> partKeys;
+  private final List<String> columns;
   private final HivePrivObjectActionType actionType;
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String tableViewURI){
-    this(type, dbname, tableViewURI, HivePrivObjectActionType.OTHER);
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String objectName) {
+    this(type, dbname, objectName, HivePrivObjectActionType.OTHER);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String objectName
+      , HivePrivObjectActionType actionType) {
+    this(type, dbname, objectName, null, null, actionType, null);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String objectName,
+      List<String> partKeys, String column) {
+    this(type, dbname, objectName, partKeys,
+        column == null ? null : new ArrayList<String>(Arrays.asList(column)),
+        HivePrivObjectActionType.OTHER, null);
+
+  }
+
+  /**
+   * Create HivePrivilegeObject of type {@link 
HivePrivilegeObjectType.COMMAND_PARAMS}
+   * @param cmdParams
+   * @return
+   */
+  public static HivePrivilegeObject createHivePrivilegeObject(List<String> 
cmdParams) {
+    return new HivePrivilegeObject(HivePrivilegeObjectType.COMMAND_PARAMS, 
null, null, null, null,
+        cmdParams);
+  }
+
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String objectName,
+    List<String> partKeys, List<String> columns, List<String> commandParams) {
+    this(type, dbname, objectName, partKeys, columns, 
HivePrivObjectActionType.OTHER, commandParams);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String tableViewURI,
-      HivePrivObjectActionType actionType) {
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, 
String objectName,
+      List<String> partKeys, List<String> columns, HivePrivObjectActionType 
actionType,
+      List<String> commandParams) {
     this.type = type;
     this.dbname = dbname;
-    this.tableviewname = tableViewURI;
+    this.objectName = objectName;
+    this.partKeys = partKeys;
+    this.columns = columns;
     this.actionType = actionType;
+    this.commandParams = commandParams;
   }
 
   public HivePrivilegeObjectType getType() {
@@ -80,11 +132,72 @@ public class HivePrivilegeObject {
     return dbname;
   }
 
-  public String getTableViewURI() {
-    return tableviewname;
+  /**
+   * @return name of table/view/uri/function name
+   */
+  public String getObjectName() {
+    return objectName;
   }
 
   public HivePrivObjectActionType getActionType() {
     return actionType;
   }
+
+  public List<String> getCommandParams() {
+    return commandParams;
+  }
+
+  public List<String> getPartKeys() {
+    return partKeys;
+  }
+
+  public List<String> getColumns() {
+    return columns;
+  }
+
+  @Override
+  public String toString() {
+    String name = null;
+    switch (type) {
+    case DATABASE:
+      name = dbname;
+      break;
+    case TABLE_OR_VIEW:
+    case PARTITION:
+      name = getDbObjectName(dbname, objectName);
+      if (partKeys != null) {
+        name += partKeys.toString();
+      }
+      break;
+    case FUNCTION:
+      name = getDbObjectName(dbname, objectName);
+      break;
+    case COLUMN:
+    case LOCAL_URI:
+    case DFS_URI:
+      name = objectName;
+      break;
+    case COMMAND_PARAMS:
+      name = commandParams.toString();
+      break;
+    }
+
+    // get the string representing action type if its non default action type
+    String actionTypeStr ="";
+    if (actionType != null) {
+      switch (actionType) {
+      case INSERT:
+      case INSERT_OVERWRITE:
+        actionTypeStr = ", action=" + actionType;
+      default:
+      }
+    }
+
+    return "Object [type=" + type + ", name=" + name + actionTypeStr + "]";
+  }
+
+  private String getDbObjectName(String dbname2, String objectName2) {
+    return (dbname == null ? "" : dbname + ".") + objectName;
+  }
+
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
 Fri Jul 25 00:38:23 2014
@@ -17,12 +17,14 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
@@ -45,12 +47,14 @@ public class GrantPrivAuthUtils {
     RequiredPrivileges reqPrivs = getGrantRequiredPrivileges(hivePrivileges);
 
     // check if this user has necessary privileges (reqPrivs) on this object
-    checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, 
userName, curRoles, isAdmin);
+    checkRequiredPrivileges(reqPrivs, hivePrivObject, metastoreClient, 
userName, curRoles, isAdmin,
+        HiveOperationType.GRANT_PRIVILEGE);
   }
 
   private static void checkRequiredPrivileges(
       RequiredPrivileges reqPrivileges, HivePrivilegeObject hivePrivObject,
-      IMetaStoreClient metastoreClient, String userName, List<String> 
curRoles, boolean isAdmin)
+      IMetaStoreClient metastoreClient, String userName, List<String> 
curRoles, boolean isAdmin,
+      HiveOperationType opType)
           throws HiveAuthzPluginException, HiveAccessControlException {
 
     // keep track of the principals on which privileges have been checked for
@@ -61,9 +65,11 @@ public class GrantPrivAuthUtils {
         metastoreClient, userName, hivePrivObject, curRoles, isAdmin);
 
     // check if required privileges is subset of available privileges
+    List<String> deniedMessages = new ArrayList<String>();
     Collection<SQLPrivTypeGrant> missingPrivs = 
reqPrivileges.findMissingPrivs(availPrivs);
-    SQLAuthorizationUtils.assertNoMissingPrivilege(missingPrivs, new 
HivePrincipal(userName,
-        HivePrincipalType.USER), hivePrivObject);
+    SQLAuthorizationUtils.addMissingPrivMsg(missingPrivs, hivePrivObject, 
deniedMessages);
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), opType, deniedMessages);
   }
 
   private static RequiredPrivileges 
getGrantRequiredPrivileges(List<HivePrivilege> hivePrivileges)

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
 Fri Jul 25 00:38:23 2014
@@ -310,6 +310,19 @@ public class Operation2Privilege {
 (null, ADMIN_PRIV_AR));
     op2Priv.put(HiveOperationType.DESCDATABASE, 
PrivRequirement.newIOPrivRequirement
 (null, null));
+    op2Priv.put(HiveOperationType.DFS, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.RESET, PrivRequirement.newIOPrivRequirement
+(null, null));
+    op2Priv.put(HiveOperationType.COMPILE, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.ADD, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.DELETE, PrivRequirement.newIOPrivRequirement
+(ADMIN_PRIV_AR, ADMIN_PRIV_AR));
+    // set command is currently not authorized through the API
+    op2Priv.put(HiveOperationType.SET, PrivRequirement.newIOPrivRequirement
+(null, null));
 
     // The following actions are authorized through SQLStdHiveAccessController,
     // and it is not using this privilege mapping, but it might make sense to 
move it here
@@ -332,11 +345,10 @@ public class Operation2Privilege {
     op2Priv.put(HiveOperationType.SHOW_ROLES, 
PrivRequirement.newIOPrivRequirement
 (null, null));
     op2Priv.put(HiveOperationType.SHOW_ROLE_GRANT, 
PrivRequirement.newIOPrivRequirement
-(null,
-        null));
+(null, null));
     op2Priv.put(HiveOperationType.SHOW_ROLE_PRINCIPALS, 
PrivRequirement.newIOPrivRequirement
-(null,
-        null));
+(null, null));
+
 
 
   }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
 Fri Jul 25 00:38:23 2014
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -185,8 +186,12 @@ public class SQLAuthorizationUtils {
     // get privileges for this user and its role on this object
     PrincipalPrivilegeSet thrifPrivs = null;
     try {
+      HiveObjectRef objectRef = 
AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
+      if (objectRef.getObjectType() == null) {
+        objectRef.setObjectType(HiveObjectType.GLOBAL);
+      }
       thrifPrivs = metastoreClient.get_privilege_set(
-          AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject), userName, 
null);
+          objectRef, userName, null);
     } catch (MetaException e) {
       throwGetPrivErr(e, hivePrivObject, userName);
     } catch (TException e) {
@@ -259,7 +264,7 @@ public class SQLAuthorizationUtils {
       Table thriftTableObj = null;
       try {
         thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(),
-            hivePrivObject.getTableViewURI());
+            hivePrivObject.getObjectName());
       } catch (Exception e) {
         throwGetObjErr(e, hivePrivObject);
       }
@@ -347,18 +352,15 @@ public class SQLAuthorizationUtils {
     }
   }
 
-  public static void assertNoMissingPrivilege(Collection<SQLPrivTypeGrant> 
missingPrivs,
-      HivePrincipal hivePrincipal, HivePrivilegeObject hivePrivObject)
-      throws HiveAccessControlException {
+  public static void addMissingPrivMsg(Collection<SQLPrivTypeGrant> 
missingPrivs,
+      HivePrivilegeObject hivePrivObject, List<String> deniedMessages) {
     if (missingPrivs.size() != 0) {
       // there are some required privileges missing, create error message
       // sort the privileges so that error message is deterministic (for tests)
       List<SQLPrivTypeGrant> sortedmissingPrivs = new 
ArrayList<SQLPrivTypeGrant>(missingPrivs);
       Collections.sort(sortedmissingPrivs);
-
-      String errMsg = "Permission denied. " + hivePrincipal
-          + " does not have following privileges on " + hivePrivObject + " : " 
+ sortedmissingPrivs;
-      throw new HiveAccessControlException(errMsg.toString());
+      String errMsg = sortedmissingPrivs + " on " + hivePrivObject;
+      deniedMessages.add(errMsg);
     }
   }
 
@@ -399,5 +401,16 @@ public class SQLAuthorizationUtils {
     return availPrivs;
   }
 
+  public static void assertNoDeniedPermissions(HivePrincipal hivePrincipal,
+      HiveOperationType hiveOpType, List<String> deniedMessages) throws 
HiveAccessControlException {
+    if (deniedMessages.size() != 0) {
+      Collections.sort(deniedMessages);
+      String errorMessage = "Permission denied: " + hivePrincipal
+          + " does not have following privileges for operation " + hiveOpType 
+ " "
+          + deniedMessages;
+      throw new HiveAccessControlException(errorMessage);
+    }
+  }
+
 
 }

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
 Fri Jul 25 00:38:23 2014
@@ -236,7 +236,7 @@ public class SQLStdHiveAccessController 
       // So this will revoke privileges that are granted by other users.This is
       // not SQL compliant behavior. Need to change/add a metastore api
       // that has desired behavior.
-      metastoreClient.revoke_privileges(new PrivilegeBag(revokePrivs));
+      metastoreClient.revoke_privileges(new PrivilegeBag(revokePrivs), 
grantOption);
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error revoking privileges", e);
     }
@@ -350,19 +350,24 @@ public class SQLStdHiveAccessController 
         + " allowed get principals in a role. " + ADMIN_ONLY_MSG);
     }
     try {
-      GetPrincipalsInRoleResponse princGrantInfo =
-          
metastoreClientFactory.getHiveMetastoreClient().get_principals_in_role(new 
GetPrincipalsInRoleRequest(roleName));
-
-      List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
-      for(RolePrincipalGrant thriftRoleGrant :  
princGrantInfo.getPrincipalGrants()){
-        hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
-      }
-      return hiveRoleGrants;
+      return 
getHiveRoleGrants(metastoreClientFactory.getHiveMetastoreClient(), roleName);
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error getting principals for all 
roles", e);
     }
   }
 
+  public static List<HiveRoleGrant> getHiveRoleGrants(IMetaStoreClient client, 
String roleName)
+      throws Exception {
+    GetPrincipalsInRoleRequest request = new 
GetPrincipalsInRoleRequest(roleName);
+    GetPrincipalsInRoleResponse princGrantInfo = 
client.get_principals_in_role(request);
+
+    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
+    for(RolePrincipalGrant thriftRoleGrant :  
princGrantInfo.getPrincipalGrants()){
+      hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
+    }
+    return hiveRoleGrants;
+  }
+
   @Override
   public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, 
HivePrivilegeObject privObj)
       throws HiveAuthzPluginException {
@@ -415,8 +420,8 @@ public class SQLStdHiveAccessController 
         }
 
         HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
-            getPluginObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
-            msObjRef.getObjectName());
+            getPluginPrivilegeObjType(msObjRef.getObjectType()), 
msObjRef.getDbName(),
+            msObjRef.getObjectName(), msObjRef.getPartValues(), 
msObjRef.getColumnName());
 
         // result grantor principal
         HivePrincipal grantorPrincipal = new 
HivePrincipal(msGrantInfo.getGrantor(),
@@ -474,8 +479,14 @@ public class SQLStdHiveAccessController 
     return false;
   }
 
-  private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
-      throws HiveAuthzPluginException {
+  /**
+   * Convert metastore object type to HivePrivilegeObjectType.
+   * Also verifies that metastore object type is of a type on which metastore 
privileges are
+   * supported by sql std auth.
+   * @param objectType
+   * @return corresponding HivePrivilegeObjectType
+   */
+  private HivePrivilegeObjectType getPluginPrivilegeObjType(HiveObjectType 
objectType) {
     switch (objectType) {
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
@@ -677,9 +688,6 @@ public class SQLStdHiveAccessController 
     LOG.debug("Configuring hooks : " + hooks);
     hiveConf.setVar(ConfVars.PREEXECHOOKS, hooks);
 
-    // set security command list to only allow set command
-    hiveConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, "set");
-
     // restrict the variables that can be set using set command to a list in 
whitelist
     hiveConf.setIsModWhiteListEnabled(true);
     String whiteListParamsStr = 
hiveConf.getVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);

Modified: 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1613335&r1=1613334&r2=1613335&view=diff
==============================================================================
--- 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
 (original)
+++ 
hive/branches/cbo/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
 Fri Jul 25 00:38:23 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
@@ -28,13 +29,13 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.Operation2Privilege.IOType;
 
 public class SQLStdHiveAuthorizationValidator implements 
HiveAuthorizationValidator {
@@ -57,13 +58,13 @@ public class SQLStdHiveAuthorizationVali
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, 
List<HivePrivilegeObject> inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException,
-      HiveAccessControlException {
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
 
     if (LOG.isDebugEnabled()) {
       String msg = "Checking privileges for operation " + hiveOpType + " by 
user "
           + authenticator.getUserName() + " on " + " input objects " + 
inputHObjs
-          + " and output objects " + outputHObjs;
+          + " and output objects " + outputHObjs + ". Context Info: " + 
context;
       LOG.debug(msg);
     }
 
@@ -71,15 +72,22 @@ public class SQLStdHiveAuthorizationVali
     IMetaStoreClient metastoreClient = 
metastoreClientFactory.getHiveMetastoreClient();
 
     // check privileges on input and output objects
-    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, 
IOType.INPUT);
-    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, 
IOType.OUTPUT);
+    List<String> deniedMessages = new ArrayList<String>();
+    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, 
IOType.INPUT, deniedMessages);
+    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, 
IOType.OUTPUT, deniedMessages);
 
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), hiveOpType, deniedMessages);
   }
 
   private void checkPrivileges(HiveOperationType hiveOpType, 
List<HivePrivilegeObject> hiveObjects,
-      IMetaStoreClient metastoreClient, String userName, IOType ioType)
+      IMetaStoreClient metastoreClient, String userName, IOType ioType, 
List<String> deniedMessages)
       throws HiveAuthzPluginException, HiveAccessControlException {
 
+    if (hiveObjects == null) {
+      return;
+    }
+
     // Compare required privileges and available privileges for each hive 
object
     for (HivePrivilegeObject hiveObj : hiveObjects) {
 
@@ -87,26 +95,34 @@ public class SQLStdHiveAuthorizationVali
           ioType);
 
       // find available privileges
-      RequiredPrivileges availPrivs;
-      if (hiveObj.getType() == HivePrivilegeObjectType.LOCAL_URI
-          || hiveObj.getType() == HivePrivilegeObjectType.DFS_URI) {
-        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new 
Path(hiveObj.getTableViewURI()),
+      RequiredPrivileges availPrivs = new RequiredPrivileges(); //start with 
an empty priv set;
+      switch (hiveObj.getType()) {
+      case LOCAL_URI:
+      case DFS_URI:
+        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new 
Path(hiveObj.getObjectName()),
             conf, userName);
-      } else if (hiveObj.getType() == HivePrivilegeObjectType.PARTITION) {
+        break;
+      case PARTITION:
         // sql std authorization is managing privileges at the table/view 
levels
         // only
         // ignore partitions
         continue;
-      } else {
-        // get the privileges that this user has on the object
+      case COMMAND_PARAMS:
+      case FUNCTION:
+        // operations that have objects of type COMMAND_PARAMS, FUNCTION are 
authorized
+        // solely on the type
+        if (privController.isUserAdmin()) {
+          availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
+        }
+        break;
+      default:
         availPrivs = 
SQLAuthorizationUtils.getPrivilegesFromMetaStore(metastoreClient, userName,
             hiveObj, privController.getCurrentRoleNames(), 
privController.isUserAdmin());
       }
 
       // Verify that there are no missing privileges
       Collection<SQLPrivTypeGrant> missingPriv = 
requiredPrivs.findMissingPrivs(availPrivs);
-      SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new 
HivePrincipal(userName,
-          HivePrincipalType.USER), hiveObj);
+      SQLAuthorizationUtils.addMissingPrivMsg(missingPriv, hiveObj, 
deniedMessages);
 
     }
   }


Reply via email to