Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java Sat Jul 26 23:45:46 2014
@@ -34,12 +34,10 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.Set;
 
 import javax.security.auth.login.LoginException;
 
@@ -47,9 +45,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.ValidTxnList;
-import org.apache.hadoop.hive.common.ValidTxnListImpl;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.conf.HiveConfUtil;
 import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsRequest;
 import org.apache.hadoop.hive.metastore.api.AddPartitionsResult;
@@ -68,11 +66,15 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
-import org.apache.hadoop.hive.metastore.api.GetOpenTxnsResponse;
 import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleRequest;
 import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
 import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
 import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
+import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
+import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeResponse;
+import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleRequest;
+import org.apache.hadoop.hive.metastore.api.GrantRevokeRoleResponse;
+import org.apache.hadoop.hive.metastore.api.GrantRevokeType;
 import org.apache.hadoop.hive.metastore.api.HeartbeatRequest;
 import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeRequest;
 import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
@@ -109,7 +111,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.TableStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
 import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
-import org.apache.hadoop.hive.metastore.api.TxnInfo;
 import org.apache.hadoop.hive.metastore.api.TxnOpenException;
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.api.UnknownDBException;
@@ -139,7 +140,7 @@ public class HiveMetaStoreClient impleme
   private boolean isConnected = false;
   private URI metastoreUris[];
   private final HiveMetaHookLoader hookLoader;
-  private final HiveConf conf;
+  protected final HiveConf conf;
   private String tokenStrForm;
   private final boolean localMetaStore;
 
@@ -147,7 +148,7 @@ public class HiveMetaStoreClient impleme
   private int retries = 5;
   private int retryDelaySeconds = 0;
 
-  static final private Log LOG = LogFactory.getLog("hive.metastore");
+  static final protected Log LOG = LogFactory.getLog("hive.metastore");
 
   public HiveMetaStoreClient(HiveConf conf)
     throws MetaException {
@@ -164,7 +165,7 @@ public class HiveMetaStoreClient impleme
     this.conf = conf;
 
     String msUri = conf.getVar(HiveConf.ConfVars.METASTOREURIS);
-    localMetaStore = (msUri == null) ? true : msUri.trim().isEmpty();
+    localMetaStore = HiveConfUtil.isEmbeddedMetaStore(msUri);
     if (localMetaStore) {
      // instantiate the metastore server handler directly instead of connecting
       // through the network
@@ -555,7 +556,8 @@ public class HiveMetaStoreClient impleme
     }
     boolean success = false;
     try {
-      client.create_table_with_environment_context(tbl, envContext);
+      // Subclasses can override this step (for example, for temporary tables)
+      create_table_with_environment_context(tbl, envContext);
       if (hook != null) {
         hook.commitCreateTable(tbl);
       }
@@ -617,7 +619,8 @@ public class HiveMetaStoreClient impleme
        List<String> tableList = getAllTables(name);
        for (String table : tableList) {
          try {
-            dropTable(name, table, deleteData, false);
+           // Subclasses can override this step (for example, for temporary tables)
+           dropTable(name, table, deleteData, false);
          } catch (UnsupportedOperationException e) {
            // Ignore Index tables, those will be dropped with parent tables
          }
@@ -771,7 +774,7 @@ public class HiveMetaStoreClient impleme
     }
     boolean success = false;
     try {
-      client.drop_table_with_environment_context(dbname, name, deleteData, envContext);
+      drop_table_with_environment_context(dbname, name, deleteData, envContext);
       if (hook != null) {
         hook.commitDropTable(tbl, deleteData);
       }
@@ -1342,7 +1345,7 @@ public class HiveMetaStoreClient impleme
     return copy;
   }
 
-  private Table deepCopy(Table table) {
+  protected Table deepCopy(Table table) {
     Table copy = null;
     if (table != null) {
       copy = new Table(table);
@@ -1382,6 +1385,14 @@ public class HiveMetaStoreClient impleme
     return copy;
   }
 
+  protected PrincipalPrivilegeSet deepCopy(PrincipalPrivilegeSet pps) {
+    PrincipalPrivilegeSet copy = null;
+    if (pps != null) {
+      copy = new PrincipalPrivilegeSet(pps);
+    }
+    return copy;
+  }
+
   private List<Partition> deepCopyPartitions(List<Partition> partitions) {
     return deepCopyPartitions(partitions, null);
   }
@@ -1433,8 +1444,19 @@ public class HiveMetaStoreClient impleme
   public boolean grant_role(String roleName, String userName,
       PrincipalType principalType, String grantor, PrincipalType grantorType,
       boolean grantOption) throws MetaException, TException {
-    return client.grant_role(roleName, userName, principalType, grantor,
-        grantorType, grantOption);
+    GrantRevokeRoleRequest req = new GrantRevokeRoleRequest();
+    req.setRequestType(GrantRevokeType.GRANT);
+    req.setRoleName(roleName);
+    req.setPrincipalName(userName);
+    req.setPrincipalType(principalType);
+    req.setGrantor(grantor);
+    req.setGrantorType(grantorType);
+    req.setGrantOption(grantOption);
+    GrantRevokeRoleResponse res = client.grant_revoke_role(req);
+    if (!res.isSetSuccess()) {
+      throw new MetaException("GrantRevokeResponse missing success field");
+    }
+    return res.isSuccess();
   }
 
   @Override
@@ -1474,19 +1496,44 @@ public class HiveMetaStoreClient impleme
   @Override
   public boolean grant_privileges(PrivilegeBag privileges)
       throws MetaException, TException {
-    return client.grant_privileges(privileges);
+    GrantRevokePrivilegeRequest req = new GrantRevokePrivilegeRequest();
+    req.setRequestType(GrantRevokeType.GRANT);
+    req.setPrivileges(privileges);
+    GrantRevokePrivilegeResponse res = client.grant_revoke_privileges(req);
+    if (!res.isSetSuccess()) {
+      throw new MetaException("GrantRevokePrivilegeResponse missing success 
field");
+    }
+    return res.isSuccess();
   }
 
   @Override
   public boolean revoke_role(String roleName, String userName,
-      PrincipalType principalType) throws MetaException, TException {
-    return client.revoke_role(roleName, userName, principalType);
+      PrincipalType principalType, boolean grantOption) throws MetaException, TException {
+    GrantRevokeRoleRequest req = new GrantRevokeRoleRequest();
+    req.setRequestType(GrantRevokeType.REVOKE);
+    req.setRoleName(roleName);
+    req.setPrincipalName(userName);
+    req.setPrincipalType(principalType);
+    req.setGrantOption(grantOption);
+    GrantRevokeRoleResponse res = client.grant_revoke_role(req);
+    if (!res.isSetSuccess()) {
+      throw new MetaException("GrantRevokeResponse missing success field");
+    }
+    return res.isSuccess();
   }
 
   @Override
-  public boolean revoke_privileges(PrivilegeBag privileges) throws MetaException,
+  public boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption) throws MetaException,
       TException {
-    return client.revoke_privileges(privileges);
+    GrantRevokePrivilegeRequest req = new GrantRevokePrivilegeRequest();
+    req.setRequestType(GrantRevokeType.REVOKE);
+    req.setPrivileges(privileges);
+    req.setRevokeGrantOption(grantOption);
+    GrantRevokePrivilegeResponse res = client.grant_revoke_privileges(req);
+    if (!res.isSetSuccess()) {
+      throw new MetaException("GrantRevokePrivilegeResponse missing success 
field");
+    }
+    return res.isSuccess();
   }
 
   @Override
@@ -1727,4 +1774,15 @@ public class HiveMetaStoreClient impleme
     return client.get_functions(dbName, pattern);
   }
 
+  protected void create_table_with_environment_context(Table tbl, EnvironmentContext envContext)
+      throws AlreadyExistsException, InvalidObjectException,
+      MetaException, NoSuchObjectException, TException {
+    client.create_table_with_environment_context(tbl, envContext);
+  }
+
+  protected void drop_table_with_environment_context(String dbname, String name,
+      boolean deleteData, EnvironmentContext envContext) throws MetaException, TException,
+      NoSuchObjectException, UnsupportedOperationException {
+    client.drop_table_with_environment_context(dbname, name, deleteData, envContext);
+  }
 }
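
The two protected methods above, together with the now-protected conf and LOG fields, exist so that subclasses can intercept table creation and drop. A minimal sketch of such a subclass, assuming a hypothetical SessionScopedMetaStoreClient and a hypothetical "TEMPORARY" table parameter as the marker (the real temporary-table client is not part of this diff):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
    import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
    import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
    import org.apache.hadoop.hive.metastore.api.Table;
    import org.apache.thrift.TException;

    // Hypothetical subclass: session-local tables never reach the metastore;
    // everything else falls through to the parent implementation.
    public class SessionScopedMetaStoreClient extends HiveMetaStoreClient {

      private final Map<String, Table> tempTables = new ConcurrentHashMap<String, Table>();

      public SessionScopedMetaStoreClient(HiveConf conf) throws MetaException {
        super(conf);
      }

      @Override
      protected void create_table_with_environment_context(Table tbl, EnvironmentContext envContext)
          throws AlreadyExistsException, InvalidObjectException,
          MetaException, NoSuchObjectException, TException {
        Map<String, String> params = tbl.getParameters();
        if (params != null && "true".equalsIgnoreCase(params.get("TEMPORARY"))) { // assumed marker
          tempTables.put(tbl.getDbName() + "." + tbl.getTableName(), tbl);
          return;
        }
        super.create_table_with_environment_context(tbl, envContext);
      }

      @Override
      protected void drop_table_with_environment_context(String dbname, String name,
          boolean deleteData, EnvironmentContext envContext) throws MetaException, TException,
          NoSuchObjectException, UnsupportedOperationException {
        if (tempTables.remove(dbname + "." + name) != null) {
          return; // was session-local; nothing to drop server-side
        }
        super.drop_table_with_environment_context(dbname, name, deleteData, envContext);
      }
    }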

Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java Sat Jul 26 23:45:46 2014
@@ -52,6 +52,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.GetPrincipalsInRoleResponse;
 import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalRequest;
 import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
+import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeRequest;
+import org.apache.hadoop.hive.metastore.api.GrantRevokePrivilegeResponse;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -955,7 +957,7 @@ public interface IMetaStoreClient {
    * @throws TException
    */
   public boolean revoke_role(String role_name, String user_name,
-      PrincipalType principalType) throws MetaException, TException;
+      PrincipalType principalType, boolean grantOption) throws MetaException, TException;
 
   /**
    *
@@ -1010,7 +1012,7 @@ public interface IMetaStoreClient {
    * @throws MetaException
    * @throws TException
    */
-  public boolean revoke_privileges(PrivilegeBag privileges)
+  public boolean revoke_privileges(PrivilegeBag privileges, boolean grantOption)
       throws MetaException, TException;
 
   /**
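
The grantOption flag added to both revoke methods changes the call shape for existing callers: false preserves the old semantics (remove the role membership or privilege entirely), while true revokes only the grant option, as the ObjectStore changes below implement. A sketch of the two call shapes (RevokeExamples is a hypothetical wrapper):

    import org.apache.hadoop.hive.metastore.IMetaStoreClient;
    import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
    import org.apache.thrift.TException;

    public final class RevokeExamples {
      // Pre-change behavior: remove the privilege rows themselves.
      static boolean revokePrivilege(IMetaStoreClient client, PrivilegeBag bag) throws TException {
        return client.revoke_privileges(bag, false);
      }

      // New capability: keep the privilege but clear its WITH GRANT OPTION flag.
      static boolean revokeGrantOptionOnly(IMetaStoreClient client, PrivilegeBag bag) throws TException {
        return client.revoke_privileges(bag, true);
      }
    }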

Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Sat Jul 26 23:45:46 2014
@@ -3083,13 +3083,25 @@ public class ObjectStore implements RawS
   }
 
   @Override
-  public boolean revokeRole(Role role, String userName, PrincipalType principalType) throws MetaException, NoSuchObjectException {
+  public boolean revokeRole(Role role, String userName, PrincipalType principalType,
+      boolean grantOption) throws MetaException, NoSuchObjectException {
     boolean success = false;
     try {
       openTransaction();
       MRoleMap roleMember = getMSecurityUserRoleMap(userName, principalType,
           role.getRoleName());
-      pm.deletePersistent(roleMember);
+      if (grantOption) {
+        // Revoke with grant option - only remove the grant option but keep the role.
+        if (roleMember.getGrantOption()) {
+          roleMember.setGrantOption(false);
+        } else {
+          throw new MetaException("User " + userName
+              + " does not have grant option with role " + role.getRoleName());
+        }
+      } else {
+        // No grant option in revoke, remove the whole role.
+        pm.deletePersistent(roleMember);
+      }
       success = commitTransaction();
     } finally {
       if (!success) {
@@ -3904,7 +3916,7 @@ public class ObjectStore implements RawS
   }
 
   @Override
-  public boolean revokePrivileges(PrivilegeBag privileges)
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
       throws InvalidObjectException, MetaException, NoSuchObjectException {
     boolean committed = false;
     try {
@@ -3938,6 +3950,14 @@ public class ObjectStore implements RawS
                   String userGrantPrivs = userGrant.getPrivilege();
                   if (privilege.equals(userGrantPrivs)) {
                     found = true;
+                    if (grantOption) {
+                      if (userGrant.getGrantOption()) {
+                        userGrant.setGrantOption(false);
+                      } else {
+                        throw new MetaException("User " + userName
+                            + " does not have grant option with privilege " + 
privilege);
+                      }
+                    }
                     persistentObjs.add(userGrant);
                     break;
                   }
@@ -3961,6 +3981,14 @@ public class ObjectStore implements RawS
                   String dbGrantPriv = dbGrant.getPrivilege();
                   if (privilege.equals(dbGrantPriv)) {
                     found = true;
+                    if (grantOption) {
+                      if (dbGrant.getGrantOption()) {
+                        dbGrant.setGrantOption(false);
+                      } else {
+                        throw new MetaException("User " + userName
+                            + " does not have grant option with privilege " + 
privilege);
+                      }
+                    }
                     persistentObjs.add(dbGrant);
                     break;
                   }
@@ -3982,6 +4010,14 @@ public class ObjectStore implements RawS
                 String tableGrantPriv = tabGrant.getPrivilege();
                 if (privilege.equalsIgnoreCase(tableGrantPriv)) {
                   found = true;
+                  if (grantOption) {
+                    if (tabGrant.getGrantOption()) {
+                      tabGrant.setGrantOption(false);
+                    } else {
+                      throw new MetaException("User " + userName
+                          + " does not have grant option with privilege " + 
privilege);
+                    }
+                  }
                   persistentObjs.add(tabGrant);
                   break;
                 }
@@ -4008,6 +4044,14 @@ public class ObjectStore implements RawS
                 String partPriv = partGrant.getPrivilege();
                 if (partPriv.equalsIgnoreCase(privilege)) {
                   found = true;
+                  if (grantOption) {
+                    if (partGrant.getGrantOption()) {
+                      partGrant.setGrantOption(false);
+                    } else {
+                      throw new MetaException("User " + userName
+                          + " does not have grant option with privilege " + 
privilege);
+                    }
+                  }
                   persistentObjs.add(partGrant);
                   break;
                 }
@@ -4039,6 +4083,14 @@ public class ObjectStore implements RawS
                     String colPriv = col.getPrivilege();
                     if (colPriv.equalsIgnoreCase(privilege)) {
                       found = true;
+                      if (grantOption) {
+                        if (col.getGrantOption()) {
+                          col.setGrantOption(false);
+                        } else {
+                          throw new MetaException("User " + userName
+                              + " does not have grant option with privilege " 
+ privilege);
+                        }
+                      }
                       persistentObjs.add(col);
                       break;
                     }
@@ -4063,6 +4115,14 @@ public class ObjectStore implements RawS
                     String colPriv = col.getPrivilege();
                     if (colPriv.equalsIgnoreCase(privilege)) {
                       found = true;
+                      if (grantOption) {
+                        if (col.getGrantOption()) {
+                          col.setGrantOption(false);
+                        } else {
+                          throw new MetaException("User " + userName
+                              + " does not have grant option with privilege " 
+ privilege);
+                        }
+                      }
                       persistentObjs.add(col);
                       break;
                     }
@@ -4083,7 +4143,12 @@ public class ObjectStore implements RawS
       }
 
       if (persistentObjs.size() > 0) {
-        pm.deletePersistentAll(persistentObjs);
+        if (grantOption) {
+          // If grant option specified, only update the privilege, don't 
remove it.
+          // Grant option has already been removed from the privileges in the 
section above
+        } else {
+          pm.deletePersistentAll(persistentObjs);
+        }
       }
       committed = commitTransaction();
     } finally {
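
The same revoke-or-demote decision recurs above for every privilege scope (user, database, table, partition, and both column scopes); the M*Privilege JDO classes share no common interface, which is why the code is repeated in place. Distilled into one hypothetical helper, the shared logic is:

    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.MetaException;

    final class RevokeLogic {
      // Stand-in for the M*Privilege JDO classes, which all expose this pair.
      interface GrantRecord {
        boolean getGrantOption();
        void setGrantOption(boolean grantOption);
      }

      static void revokeOrDemote(GrantRecord grant, boolean grantOption, String userName,
          String privilege, List<Object> toDelete) throws MetaException {
        if (grantOption) {
          // Revoke only the grant option; the privilege row itself survives.
          if (!grant.getGrantOption()) {
            throw new MetaException("User " + userName
                + " does not have grant option with privilege " + privilege);
          }
          grant.setGrantOption(false);
        } else {
          // Plain revoke: queue the whole row for deletion.
          toDelete.add(grant);
        }
      }
    }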

Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/RawStore.java Sat Jul 26 23:45:46 2014
@@ -230,8 +230,8 @@ public interface RawStore extends Config
       String grantor, PrincipalType grantorType, boolean grantOption)
       throws MetaException, NoSuchObjectException, InvalidObjectException;
 
-  public abstract boolean revokeRole(Role role, String userName, PrincipalType principalType)
-      throws MetaException, NoSuchObjectException;
+  public abstract boolean revokeRole(Role role, String userName, PrincipalType principalType,
+      boolean grantOption) throws MetaException, NoSuchObjectException;
 
   public abstract PrincipalPrivilegeSet getUserPrivilegeSet(String userName,
       List<String> groupNames) throws InvalidObjectException, MetaException;
@@ -273,7 +273,7 @@ public interface RawStore extends Config
   public abstract boolean grantPrivileges (PrivilegeBag privileges)
       throws InvalidObjectException, MetaException, NoSuchObjectException;
 
-  public abstract boolean revokePrivileges  (PrivilegeBag privileges)
-  public abstract boolean revokePrivileges  (PrivilegeBag privileges, boolean grantOption)
   throws InvalidObjectException, MetaException, NoSuchObjectException;
 
   public abstract org.apache.hadoop.hive.metastore.api.Role getRole(

Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Sat Jul 26 23:45:46 2014
@@ -89,8 +89,7 @@ public class Warehouse {
     try {
       Class<? extends MetaStoreFS> handlerClass = (Class<? extends 
MetaStoreFS>) Class
           .forName(handlerClassStr, true, JavaUtils.getClassLoader());
-      MetaStoreFS handler = (MetaStoreFS) ReflectionUtils.newInstance(
-          handlerClass, conf);
+      MetaStoreFS handler = ReflectionUtils.newInstance(handlerClass, conf);
       return handler;
     } catch (ClassNotFoundException e) {
       throw new MetaException("Error in loading MetaStoreFS handler."
@@ -102,7 +101,7 @@ public class Warehouse {
   /**
    * Helper functions to convert IOException to MetaException
    */
-  public FileSystem getFs(Path f) throws MetaException {
+  public static FileSystem getFs(Path f, Configuration conf) throws MetaException {
     try {
       return f.getFileSystem(conf);
     } catch (IOException e) {
@@ -111,6 +110,10 @@ public class Warehouse {
     return null;
   }
 
+  public FileSystem getFs(Path f) throws MetaException {
+    return getFs(f, conf);
+  }
+
   public static void closeFs(FileSystem fs) throws MetaException {
     try {
       if (fs != null) {
@@ -135,12 +138,16 @@ public class Warehouse {
    *          Path to be canonicalized
    * @return Path with canonical scheme and authority
    */
-  public Path getDnsPath(Path path) throws MetaException {
-    FileSystem fs = getFs(path);
+  public static Path getDnsPath(Path path, Configuration conf) throws MetaException {
+    FileSystem fs = getFs(path, conf);
     return (new Path(fs.getUri().getScheme(), fs.getUri().getAuthority(), path
         .toUri().getPath()));
   }
 
+  public Path getDnsPath(Path path) throws MetaException {
+    return getDnsPath(path, conf);
+  }
+
   /**
    * Resolve the configured warehouse root dir with respect to the 
configuration
    * This involves opening the FileSystem corresponding to the warehouse root
@@ -174,7 +181,6 @@ public class Warehouse {
     return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
   }
 
-
   public Path getTablePath(Database db, String tableName)
       throws MetaException {
     return getDnsPath(new Path(getDatabasePath(db), tableName.toLowerCase()));
@@ -556,4 +562,12 @@ public class Warehouse {
     return values;
   }
 
+  public static Map<String, String> makeSpecFromValues(List<FieldSchema> 
partCols,
+      List<String> values) {
+    Map<String, String> spec = new LinkedHashMap<String, String>();
+    for (int i = 0; i < values.size(); i++) {
+      spec.put(partCols.get(i).getName(), values.get(i));
+    }
+    return spec;
+  }
 }
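
makeSpecFromValues pairs partition column names with values positionally, so the values list must be in partition-column order; the LinkedHashMap preserves that order in the returned spec. The static getFs/getDnsPath overloads likewise make these path helpers usable with just a Configuration, without constructing a Warehouse. A small usage sketch (the column names are illustrative):

    import java.util.Arrays;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.hive.metastore.Warehouse;
    import org.apache.hadoop.hive.metastore.api.FieldSchema;

    public final class PartSpecExample {
      public static void main(String[] args) {
        List<FieldSchema> partCols = Arrays.asList(
            new FieldSchema("ds", "string", null),
            new FieldSchema("hr", "string", null));
        // Prints {ds=2014-07-26, hr=23}, keyed in partition-column order.
        Map<String, String> spec =
            Warehouse.makeSpecFromValues(partCols, Arrays.asList("2014-07-26", "23"));
        System.out.println(spec);
      }
    }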

Modified: hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java (original)
+++ hive/branches/spark/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java Sat Jul 26 23:45:46 2014
@@ -727,7 +727,7 @@ public class TxnHandler {
   }
 
  public ShowCompactResponse showCompact(ShowCompactRequest rqst) throws MetaException {
-    ShowCompactResponse response = new ShowCompactResponse();
+    ShowCompactResponse response = new ShowCompactResponse(new ArrayList<ShowCompactResponseElement>());
     Connection dbConn = getDbConn(Connection.TRANSACTION_READ_COMMITTED);
     Statement stmt = null;
     try {
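
Constructing the response with an empty list, rather than leaving the Thrift compacts field unset, means callers see an empty collection instead of null when nothing is queued; the test change further down relies on exactly that. A caller-side sketch:

    import org.apache.hadoop.hive.metastore.api.ShowCompactRequest;
    import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
    import org.apache.hadoop.hive.metastore.api.ShowCompactResponseElement;
    import org.apache.hadoop.hive.metastore.txn.TxnHandler;

    public final class ShowCompactExample {
      static void printCompactions(TxnHandler txnHandler) throws Exception {
        ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
        // Previously rsp.getCompacts() could be null; now iteration is always safe
        // and rsp.getCompactsSize() is 0 when there is nothing to report.
        for (ShowCompactResponseElement e : rsp.getCompacts()) {
          System.out.println(e.getDbname() + "." + e.getTablename() + " " + e.getState());
        }
      }
    }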

Modified: hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java (original)
+++ hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreControlledCommit.java Sat Jul 26 23:45:46 2014
@@ -349,9 +349,9 @@ public class DummyRawStoreControlledComm
   }
 
   @Override
-  public boolean revokeRole(Role role, String userName, PrincipalType principalType)
+  public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption)
       throws MetaException, NoSuchObjectException {
-    return objectStore.revokeRole(role, userName, principalType);
+    return objectStore.revokeRole(role, userName, principalType, grantOption);
   }
 
   @Override
@@ -437,9 +437,9 @@ public class DummyRawStoreControlledComm
   }
 
   @Override
-  public boolean revokePrivileges(PrivilegeBag privileges) throws InvalidObjectException,
-      MetaException, NoSuchObjectException {
-    return objectStore.revokePrivileges(privileges);
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
+      throws InvalidObjectException, MetaException, NoSuchObjectException {
+    return objectStore.revokePrivileges(privileges, grantOption);
   }
 
   @Override

Modified: hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java (original)
+++ hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java Sat Jul 26 23:45:46 2014
@@ -368,7 +368,7 @@ public class DummyRawStoreForJdoConnecti
   }
 
   @Override
-  public boolean revokeRole(Role role, String userName, PrincipalType principalType)
+  public boolean revokeRole(Role role, String userName, PrincipalType principalType, boolean grantOption)
       throws MetaException, NoSuchObjectException {
 
     return false;
@@ -462,8 +462,8 @@ public class DummyRawStoreForJdoConnecti
   }
 
   @Override
-  public boolean revokePrivileges(PrivilegeBag privileges) throws InvalidObjectException,
-      MetaException, NoSuchObjectException {
+  public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
+      throws InvalidObjectException, MetaException, NoSuchObjectException {
 
     return false;
   }

Modified: hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java (original)
+++ hive/branches/spark/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java Sat Jul 26 23:45:46 2014
@@ -17,15 +17,12 @@
  */
 package org.apache.hadoop.hive.metastore.txn;
 
-import junit.framework.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Ignore;
 import org.junit.Test;
 
 import java.util.ArrayList;
@@ -200,7 +197,7 @@ public class TestCompactionTxnHandler {
     assertEquals(0, txnHandler.findReadyToClean().size());
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    assertNull(rsp.getCompacts());
+    assertEquals(0, rsp.getCompactsSize());
   }
 
   @Test

Modified: hive/branches/spark/ql/pom.xml
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/pom.xml?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/pom.xml (original)
+++ hive/branches/spark/ql/pom.xml Sat Jul 26 23:45:46 2014
@@ -500,6 +500,26 @@
               <goal>test-jar</goal>
             </goals>
           </execution>
+          <execution>
+            <!--this 'execution' will produce a hive-exec-$version-core.jar 
which has
+            all classes from ql module, i.e. the same contents as 
hive-exec-$version.jar would
+            have had if there was shade plugin below.  The way shade was 
configured since 0.13, is
+            to override the default jar for ql module with the saded one but 
keep the same name.
+            In other words, ql modules producesd a single uber jar under the 
module's coordinates,
+            i.e. w/o a classifier.  We also need a slim jar that only has 
classes from ql.  This
+            jar will now be with a 'core' classifier to ensure that prior 
behavior is preserved.
+            It would have been better to have a slim jar called 
hive-exec-$version.jar and a fat
+            jar called hive-exec-$version-shaded.jar but doing this now will 
cause backwards compat.
+            issues-->
+            <id>core-jar</id>
+            <phase>package</phase>
+            <goals>
+              <goal>jar</goal>
+            </goals>
+            <configuration>
+              <classifier>core</classifier>
+            </configuration>
+          </execution>
         </executions>
       </plugin>
       <plugin>
@@ -513,6 +533,7 @@
               <goal>shade</goal>
             </goals>
             <configuration>
+                <!--also see maven-jar-plugin execution.id=core-jar-->
               <artifactSet>
                 <includes>
                   <!-- order is meant to be the same as the ant build -->

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Context.java Sat Jul 26 23:45:46 2014
@@ -124,12 +124,9 @@ public class Context {
 
     // local & non-local tmp location is configurable. however it is the same 
across
     // all external file systems
-    nonLocalScratchPath =
-      new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR),
-               executionId);
-    localScratchDir = new Path(HiveConf.getVar(conf, 
HiveConf.ConfVars.LOCALSCRATCHDIR),
-            executionId).toUri().getPath();
-    scratchDirPermission= HiveConf.getVar(conf, 
HiveConf.ConfVars.SCRATCHDIRPERMISSION);
+    nonLocalScratchPath = new Path(SessionState.getHDFSSessionPath(conf), 
executionId);
+    localScratchDir = new Path(SessionState.getLocalSessionPath(conf), 
executionId).toUri().getPath();
+    scratchDirPermission = HiveConf.getVar(conf, 
HiveConf.ConfVars.SCRATCHDIRPERMISSION);
   }
 
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Sat Jul 26 23:45:46 2014
@@ -101,6 +101,8 @@ import org.apache.hadoop.hive.ql.process
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext.CLIENT_TYPE;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivObjectActionType;
@@ -445,7 +447,7 @@ public class Driver implements CommandPr
 
         try {
           perfLogger.PerfLogBegin(CLASS_NAME, PerfLogger.DO_AUTHORIZATION);
-          doAuthorization(sem);
+          doAuthorization(sem, command);
         } catch (AuthorizationException authExp) {
           console.printError("Authorization failed:" + authExp.getMessage()
               + ". Use SHOW GRANT to get more details.");
@@ -483,15 +485,25 @@ public class Driver implements CommandPr
     }
   }
 
-  public static void doAuthorization(BaseSemanticAnalyzer sem)
+  /**
+   * Do authorization using post semantic analysis information in the semantic analyzer.
+   * The original command is also passed so that the authorization interface can provide
+   * more useful information in logs.
+   * @param sem
+   * @param command
+   * @throws HiveException
+   * @throws AuthorizationException
+   */
+  public static void doAuthorization(BaseSemanticAnalyzer sem, String command)
       throws HiveException, AuthorizationException {
     HashSet<ReadEntity> inputs = sem.getInputs();
     HashSet<WriteEntity> outputs = sem.getOutputs();
     SessionState ss = SessionState.get();
     HiveOperation op = ss.getHiveOperation();
     Hive db = sem.getDb();
+
     if (ss.isAuthorizationModeV2()) {
-      doAuthorizationV2(ss, op, inputs, outputs);
+      doAuthorizationV2(ss, op, inputs, outputs, command);
       return;
     }
     if (op == null) {
@@ -672,11 +684,20 @@ public class Driver implements CommandPr
   }
 
  private static void doAuthorizationV2(SessionState ss, HiveOperation op, HashSet<ReadEntity> inputs,
-      HashSet<WriteEntity> outputs) throws HiveException {
+      HashSet<WriteEntity> outputs, String command) throws HiveException {
+
+    HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder();
+
+    authzContextBuilder.setClientType(ss.isHiveServerQuery() ? CLIENT_TYPE.HIVESERVER2
+        : CLIENT_TYPE.HIVECLI);
+    authzContextBuilder.setUserIpAddress(ss.getUserIpAddress());
+    authzContextBuilder.setSessionString(ss.getSessionId());
+    authzContextBuilder.setCommandString(command);
+
     HiveOperationType hiveOpType = getHiveOperationType(op);
     List<HivePrivilegeObject> inputsHObjs = getHivePrivObjects(inputs);
     List<HivePrivilegeObject> outputHObjs = getHivePrivObjects(outputs);
-    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs);
+    ss.getAuthorizerV2().checkPrivileges(hiveOpType, inputsHObjs, outputHObjs, authzContextBuilder.build());
     return;
   }
 
@@ -703,18 +724,21 @@ public class Driver implements CommandPr
 
       //support for authorization on partitions needs to be added
       String dbname = null;
-      String tableURI = null;
+      String objName = null;
       switch(privObject.getType()){
       case DATABASE:
        dbname = privObject.getDatabase() == null ? null : privObject.getDatabase().getName();
        break;
      case TABLE:
        dbname = privObject.getTable() == null ? null : privObject.getTable().getDbName();
-        tableURI = privObject.getTable() == null ? null : privObject.getTable().getTableName();
+        objName = privObject.getTable() == null ? null : privObject.getTable().getTableName();
         break;
       case DFS_DIR:
       case LOCAL_DIR:
-        tableURI = privObject.getD();
+        objName = privObject.getD();
+        break;
+      case FUNCTION:
+        objName = privObject.getFunctionName();
         break;
       case DUMMYPARTITION:
       case PARTITION:
@@ -724,7 +748,7 @@ public class Driver implements CommandPr
           throw new AssertionError("Unexpected object type");
       }
      HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
-      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, tableURI,
+      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
           actionType);
       hivePrivobjs.add(hPrivObject);
     }
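
On the plugin side, the extra HiveAuthzContext argument lets an authorizer log who issued a command and what was typed. A consumer sketch, assuming getter names (getClientType, getSessionString, getCommandString) that mirror the Builder setters used above:

    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;

    public final class AuthzLogging {
      private static final Log LOG = LogFactory.getLog(AuthzLogging.class);

      // Intended to be called from a HiveAuthorizer.checkPrivileges implementation.
      static void logAuthzRequest(HiveOperationType opType, HiveAuthzContext context) {
        LOG.info("Authorizing " + opType
            + " client=" + context.getClientType()
            + " session=" + context.getSessionString()
            + " command=" + context.getCommandString());
      }
    }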

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Sat Jul 26 23:45:46 2014
@@ -371,10 +371,14 @@ public enum ErrorMsg {
   INVALID_DIR(10252, "{0} is not a directory", true),
   NO_VALID_LOCATIONS(10253, "Could not find any valid location to place the jars. " +
       "Please update hive.jar.directory or hive.user.install.directory with a valid location", false),
-  UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
+  UNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP(10254,
       "Principal type GROUP is not supported in this authorization setting", "28000"),
   INVALID_TABLE_NAME(10255, "Invalid table name {0}", true),
   INSERT_INTO_IMMUTABLE_TABLE(10256, "Inserting into a non-empty immutable table is not allowed"),
+  UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_GLOBAL(10257,
+      "Resource type GLOBAL is not supported in this authorization setting", "28000"),
+  UNSUPPORTED_AUTHORIZATION_RESOURCE_TYPE_COLUMN(10258,
+      "Resource type COLUMN is not supported in this authorization setting", "28000"),
 
   TXNMGR_NOT_SPECIFIED(10260, "Transaction manager not specified correctly, " +
       "set hive.txn.manager"),

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java Sat Jul 26 23:45:46 2014
@@ -220,4 +220,8 @@ public class ColumnInfo implements Seria
 
     return true;
   }
+
+  public void setObjectinspector(ObjectInspector writableObjectInspector) {
+    this.objectInspector = writableObjectInspector;
+  }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnStatsTask.java Sat Jul 26 23:45:46 2014
@@ -287,7 +287,8 @@ public class ColumnStatsTask extends Tas
       Table tbl = db.getTable(dbName,tableName);
       List<FieldSchema> partColSchema = tbl.getPartCols();
       // Partition columns are appended at end, we only care about stats column
-      for (int i = 0; i < fields.size() - partColSchema.size(); i++) {
+      int numOfStatCols = isTblLevel ? fields.size() : fields.size() - partColSchema.size();
+      for (int i = 0; i < numOfStatCols; i++) {
         // Get the field objectInspector, fieldName and the field object.
         ObjectInspector foi = fields.get(i).getFieldObjectInspector();
         Object f = (list == null ? null : list.get(i));

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sat Jul 26 23:45:46 2014
@@ -53,6 +53,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -65,17 +66,12 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.GetOpenTxnsInfoResponse;
-import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
-import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
-import org.apache.hadoop.hive.metastore.api.HiveObjectType;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
-import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.RolePrincipalGrant;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
@@ -150,6 +146,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
@@ -167,15 +164,13 @@ import org.apache.hadoop.hive.ql.plan.Un
 import org.apache.hadoop.hive.ql.plan.UnlockTableDesc;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
-import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeInfo;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
@@ -196,6 +191,7 @@ import org.apache.hadoop.hive.shims.Shim
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.stringtemplate.v4.ST;
 
 /**
@@ -430,6 +426,11 @@ public class DDLTask extends Task<DDLWor
         return showCreateTable(db, showCreateTbl);
       }
 
+      ShowConfDesc showConf = work.getShowConfDesc();
+      if (showConf != null) {
+        return showConf(db, showConf);
+      }
+
       RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc();
       if (roleDDLDesc != null) {
         return roleDDL(roleDDLDesc);
@@ -445,7 +446,8 @@ public class DDLTask extends Task<DDLWor
       RevokeDesc revokeDesc = work.getRevokeDesc();
       if (revokeDesc != null) {
         return grantOrRevokePrivileges(revokeDesc.getPrincipals(), revokeDesc
-            .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null, false, false);
+            .getPrivileges(), revokeDesc.getPrivilegeSubjectDesc(), null, null,
+            revokeDesc.isGrantOption(), false);
       }
 
       ShowGrantDesc showGrantDesc = work.getShowGrantDesc();
@@ -484,14 +486,53 @@ public class DDLTask extends Task<DDLWor
         return exchangeTablePartition(db, alterTableExchangePartition);
       }
     } catch (Throwable e) {
-      setException(e);
-      LOG.error(stringifyException(e));
+      failed(e);
       return 1;
     }
     assert false;
     return 0;
   }
 
+  private void failed(Throwable e) {
+    while (e.getCause() != null && e.getClass() == RuntimeException.class) {
+      e = e.getCause();
+    }
+    setException(e);
+    LOG.error(stringifyException(e));
+  }
+
+  private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
+    ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
+    if (conf == null) {
+      throw new HiveException("invalid configuration name " + 
showConf.getConfName());
+    }
+    String description = conf.getDescription();
+    String defaltValue = conf.getDefaultValue();
+    DataOutputStream output = getOutputStream(showConf.getResFile());
+    try {
+      if (description != null) {
+        if (defaltValue != null) {
+          output.write(defaltValue.getBytes());
+        }
+        output.write(separator);
+        output.write(conf.typeString().getBytes());
+        output.write(separator);
+        if (description != null) {
+          output.write(description.replaceAll(" *\n *", " ").getBytes());
+        }
+        output.write(terminator);
+      }
+    } finally {
+      output.close();
+    }
+    return 0;
+  }
+
+  private DataOutputStream getOutputStream(Path outputFile) throws Exception {
+    FileSystem fs = outputFile.getFileSystem(conf);
+    return fs.create(outputFile);
+  }
+
   /**
    * First, make sure the source table/partition is not
    * archived/indexes/non-rcfile. If either of these is true, throw an
@@ -523,186 +564,46 @@ public class DDLTask extends Task<DDLWor
     return ret;
   }
 
-  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
-      throws HiveException {
-    try {
-      boolean grantRole = grantOrRevokeRoleDDL.getGrant();
-      List<PrincipalDesc> principals = grantOrRevokeRoleDDL.getPrincipalDesc();
-      List<String> roles = grantOrRevokeRoleDDL.getRoles();
-
-      if(SessionState.get().isAuthorizationModeV2()){
-        return grantOrRevokeRoleV2(grantOrRevokeRoleDDL);
-      }
-
-      for (PrincipalDesc principal : principals) {
-        String userName = principal.getName();
-        for (String roleName : roles) {
-          if (grantRole) {
-            db.grantRole(roleName, userName, principal.getType(),
-                grantOrRevokeRoleDDL.getGrantor(), grantOrRevokeRoleDDL
-                .getGrantorType(), grantOrRevokeRoleDDL.isGrantOption());
-          } else {
-            db.revokeRole(roleName, userName, principal.getType());
-          }
-        }
-      }
-    } catch (Exception e) {
-      throw new HiveException(e);
+  private HiveAuthorizer getSessionAuthorizer() {
+    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    if (authorizer == null) {
+      authorizer = new HiveV1Authorizer(conf, db);
     }
-    return 0;
+    return authorizer;
   }
 
-  private int grantOrRevokeRoleV2(GrantRevokeRoleDDL grantOrRevokeRoleDDL) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int grantOrRevokeRole(GrantRevokeRoleDDL grantOrRevokeRoleDDL)
+      throws HiveException {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     //convert to the types needed for plugin api
     HivePrincipal grantorPrinc = null;
     if(grantOrRevokeRoleDDL.getGrantor() != null){
       grantorPrinc = new HivePrincipal(grantOrRevokeRoleDDL.getGrantor(),
-          getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
+          AuthorizationUtils.getHivePrincipalType(grantOrRevokeRoleDDL.getGrantorType()));
     }
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
+    List<HivePrincipal> principals =
+        AuthorizationUtils.getHivePrincipals(grantOrRevokeRoleDDL.getPrincipalDesc());
     List<String> roles = grantOrRevokeRoleDDL.getRoles();
 
-    if(grantOrRevokeRoleDDL.getGrant()){
-      authorizer.grantRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
-    }
-    else{
-      authorizer.revokeRole(hivePrincipals, roles,
-          grantOrRevokeRoleDDL.isGrantOption(), grantorPrinc);
+    boolean grantOption = grantOrRevokeRoleDDL.isGrantOption();
+    if (grantOrRevokeRoleDDL.getGrant()) {
+      authorizer.grantRole(principals, roles, grantOption, grantorPrinc);
+    } else {
+      authorizer.revokeRole(principals, roles, grantOption, grantorPrinc);
     }
     return 0;
   }
 
   private int showGrants(ShowGrantDesc showGrantDesc) throws HiveException {
 
-    if(SessionState.get().isAuthorizationModeV2()){
-      return showGrantsV2(showGrantDesc);
-    }
-
-    PrincipalDesc principalDesc = showGrantDesc.getPrincipalDesc();
-    PrivilegeObjectDesc hiveObjectDesc = showGrantDesc.getHiveObj();
-    String principalName = principalDesc == null ? null : principalDesc.getName();
-    PrincipalType type = principalDesc == null ? null : principalDesc.getType();
-    List<HiveObjectPrivilege> privs = new ArrayList<HiveObjectPrivilege>();
-    try {
-      if (hiveObjectDesc == null) {
-        privs.addAll(db.showPrivilegeGrant(HiveObjectType.GLOBAL, principalName, type,
-            null, null, null, null));
-      } else if (hiveObjectDesc != null && hiveObjectDesc.getObject() == null) {
-        privs.addAll(db.showPrivilegeGrant(null, principalName, type, null, null, null, null));
-      } else {
-        String obj = hiveObjectDesc.getObject();
-        boolean notFound = true;
-        String dbName = null;
-        String tableName = null;
-        Table tableObj = null;
-        Database dbObj = null;
-
-        if (hiveObjectDesc.getTable()) {
-          String[] dbTab = splitTableName(obj);
-          dbName = dbTab[0];
-          tableName = dbTab[1];
-          dbObj = db.getDatabase(dbName);
-          tableObj = db.getTable(dbName, tableName);
-          notFound = (dbObj == null || tableObj == null);
-        } else {
-          dbName = hiveObjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          notFound = (dbObj == null);
-        }
-        if (notFound) {
-          throw new HiveException(obj + " can not be found");
-        }
-
-        String partName = null;
-        List<String> partValues = null;
-        if (hiveObjectDesc.getPartSpec() != null) {
-          partName = Warehouse
-              .makePartName(hiveObjectDesc.getPartSpec(), false);
-          partValues = Warehouse.getPartValuesFromPartName(partName);
-        }
-
-        if (!hiveObjectDesc.getTable()) {
-          // show database level privileges
-          privs.addAll(db.showPrivilegeGrant(HiveObjectType.DATABASE,
-              principalName, type, dbName, null, null, null));
-        } else {
-          if (showGrantDesc.getColumns() != null) {
-            // show column level privileges
-            for (String columnName : showGrantDesc.getColumns()) {
-              privs.addAll(db.showPrivilegeGrant(
-                  HiveObjectType.COLUMN, principalName,
-                  type, dbName, tableName, partValues,
-                  columnName));
-            }
-          } else if (hiveObjectDesc.getPartSpec() != null) {
-            // show partition level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.PARTITION, principalName, type,
-                dbName, tableName, partValues, null));
-          } else {
-            // show table level privileges
-            privs.addAll(db.showPrivilegeGrant(
-                HiveObjectType.TABLE, principalName, type,
-                dbName, tableName, null, null));
-          }
-        }
-      }
-      boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privs, testMode), showGrantDesc.getResFile());
-    } catch (FileNotFoundException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (IOException e) {
-      LOG.info("show table status: " + stringifyException(e));
-      return 1;
-    } catch (Exception e) {
-      e.printStackTrace();
-      throw new HiveException(e);
-    }
-    return 0;
-  }
-
-  private static String[] splitTableName(String fullName) {
-    String[] dbTab = fullName.split("\\.");
-    String[] result = new String[2];
-    if (dbTab.length == 2) {
-      result[0] = dbTab[0];
-      result[1] = dbTab[1];
-    } else {
-      result[0] = SessionState.get().getCurrentDatabase();
-      result[1] = fullName;
-    }
-    return result;
-  }
-
-  private int showGrantsV2(ShowGrantDesc showGrantDesc) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     try {
       List<HivePrivilegeInfo> privInfos = authorizer.showPrivileges(
-          getHivePrincipal(showGrantDesc.getPrincipalDesc()),
-          getHivePrivilegeObject(showGrantDesc.getHiveObj())
-          );
-      List<HiveObjectPrivilege> privList = new 
ArrayList<HiveObjectPrivilege>();
-      for(HivePrivilegeInfo privInfo : privInfos){
-        HivePrincipal principal = privInfo.getPrincipal();
-        HivePrivilegeObject privObj = privInfo.getObject();
-        HivePrivilege priv = privInfo.getPrivilege();
-
-        PrivilegeGrantInfo grantInfo =
-            AuthorizationUtils.getThriftPrivilegeGrantInfo(priv, privInfo.getGrantorPrincipal(),
-                privInfo.isGrantOption(), privInfo.getGrantTime());
-
-        //only grantInfo is used
-        HiveObjectPrivilege thriftObjectPriv = new HiveObjectPrivilege(new HiveObjectRef(
-            AuthorizationUtils.getThriftHiveObjType(privObj.getType()),privObj.getDbname(),
-            privObj.getTableViewURI(),null,null), principal.getName(),
-            AuthorizationUtils.getThriftPrincipalType(principal.getType()), grantInfo);
-        privList.add(thriftObjectPriv);
-      }
+          AuthorizationUtils.getHivePrincipal(showGrantDesc.getPrincipalDesc()),
+          AuthorizationUtils.getHivePrivilegeObject(showGrantDesc.getHiveObj(),
+              showGrantDesc.getColumns()));
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-      writeToFile(writeGrantInfo(privList, testMode), showGrantDesc.getResFile());
+      writeToFile(writeGrantInfo(privInfos, testMode), showGrantDesc.getResFile());
     } catch (IOException e) {
       throw new HiveException("Error in show grant statement", e);
     }
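
The rewritten showGrants above routes every SHOW GRANT through the pluggable
HiveAuthorizer, so it can be exercised end-to-end from the Driver. A minimal,
hypothetical usage sketch (the user and table names are illustrative and not
part of this commit):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;
    import org.apache.hadoop.hive.ql.session.SessionState;

    public class ShowGrantSmokeTest {
      public static void main(String[] args) throws Exception {
        // Assumes a HiveConf pointing at a working metastore with a V2 authorizer.
        HiveConf conf = new HiveConf();
        SessionState.start(conf);
        Driver driver = new Driver(conf);
        // Reaches DDLTask.showGrants, which now delegates to authorizer.showPrivileges.
        driver.run("SHOW GRANT USER hive_test_user ON TABLE src");
      }
    }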
@@ -714,155 +615,15 @@ public class DDLTask extends Task<DDLWor
      String grantor, PrincipalType grantorType, boolean grantOption, boolean isGrant)
          throws HiveException {
 
-    if(SessionState.get().isAuthorizationModeV2()){
-      return grantOrRevokePrivilegesV2(principals, privileges, privSubjectDesc, grantor,
-          grantorType, grantOption, isGrant);
-    }
-
-    if (privileges == null || privileges.size() == 0) {
-      console.printError("No privilege found.");
-      return 1;
-    }
-
-    String dbName = null;
-    String tableName = null;
-    Table tableObj = null;
-    Database dbObj = null;
-
-    try {
-
-      if (privSubjectDesc != null) {
-        if (privSubjectDesc.getPartSpec() != null && isGrant) {
-          throw new HiveException("Grant does not support partition level.");
-        }
-        String obj = privSubjectDesc.getObject();
-
-        //get the db, table objects
-        if (privSubjectDesc.getTable()) {
-          String[] dbTable = Utilities.getDbTableName(obj);
-          dbName = dbTable[0];
-          tableName = dbTable[1];
-
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-          tableObj = db.getTable(dbName, tableName);
-          if (tableObj == null) {
-            throwNotFound("Table", obj);
-          }
-        } else {
-          dbName = privSubjectDesc.getObject();
-          dbObj = db.getDatabase(dbName);
-          if (dbObj == null) {
-            throwNotFound("Database", dbName);
-          }
-        }
-      }
-
-      PrivilegeBag privBag = new PrivilegeBag();
-      if (privSubjectDesc == null) {
-        for (int idx = 0; idx < privileges.size(); idx++) {
-          Privilege priv = privileges.get(idx).getPrivilege();
-          if (privileges.get(idx).getColumns() != null
-              && privileges.get(idx).getColumns().size() > 0) {
-            throw new HiveException(
-                "For user-level privileges, column sets should be null. 
columns="
-                    + privileges.get(idx).getColumns().toString());
-          }
-
-          privBag.addToPrivileges(new HiveObjectPrivilege(new HiveObjectRef(
-              HiveObjectType.GLOBAL, null, null, null, null), null, null,
-              new PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType,
-                  grantOption)));
-        }
-      } else {
-        org.apache.hadoop.hive.metastore.api.Partition partObj = null;
-        List<String> partValues = null;
-        if (tableObj != null) {
-          if ((!tableObj.isPartitioned())
-              && privSubjectDesc.getPartSpec() != null) {
-            throw new HiveException(
-                "Table is not partitioned, but partition name is present: 
partSpec="
-                    + privSubjectDesc.getPartSpec().toString());
-          }
-
-          if (privSubjectDesc.getPartSpec() != null) {
-            partObj = db.getPartition(tableObj, privSubjectDesc.getPartSpec(),
-                false).getTPartition();
-            partValues = partObj.getValues();
-          }
-        }
-
-        for (PrivilegeDesc privDesc : privileges) {
-          List<String> columns = privDesc.getColumns();
-          Privilege priv = privDesc.getPrivilege();
-          if (columns != null && columns.size() > 0) {
-            if (!priv.supportColumnLevel()) {
-              throw new HiveException(priv.toString()
-                  + " does not support column level.");
-            }
-            if (privSubjectDesc == null || tableName == null) {
-              throw new HiveException(
-                  "For user-level/database-level privileges, column sets 
should be null. columns="
-                      + columns);
-            }
-            for (int i = 0; i < columns.size(); i++) {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.COLUMN, dbName, tableName,
-                      partValues, columns.get(i)), null, null,  new 
PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          } else {
-            if (privSubjectDesc.getTable()) {
-              if (privSubjectDesc.getPartSpec() != null) {
-                privBag.addToPrivileges(new HiveObjectPrivilege(
-                    new HiveObjectRef(HiveObjectType.PARTITION, dbName,
-                        tableName, partValues, null), null, null,  new 
PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              } else {
-                privBag
-                .addToPrivileges(new HiveObjectPrivilege(
-                    new HiveObjectRef(HiveObjectType.TABLE, dbName,
-                        tableName, null, null), null, null, new 
PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-              }
-            } else {
-              privBag.addToPrivileges(new HiveObjectPrivilege(
-                  new HiveObjectRef(HiveObjectType.DATABASE, dbName, null,
-                      null, null), null, null, new 
PrivilegeGrantInfo(priv.toString(), 0, grantor, grantorType, grantOption)));
-            }
-          }
-        }
-      }
-
-      for (PrincipalDesc principal : principals) {
-        for (int i = 0; i < privBag.getPrivileges().size(); i++) {
-          HiveObjectPrivilege objPrivs = privBag.getPrivileges().get(i);
-          objPrivs.setPrincipalName(principal.getName());
-          objPrivs.setPrincipalType(principal.getType());
-        }
-        if (isGrant) {
-          db.grantPrivileges(privBag);
-        } else {
-          db.revokePrivileges(privBag);
-        }
-      }
-    } catch (Exception e) {
-      console.printError("Error: " + e.getMessage());
-      return 1;
-    }
-
-    return 0;
-  }
-
-  private int grantOrRevokePrivilegesV2(List<PrincipalDesc> principals,
-      List<PrivilegeDesc> privileges, PrivilegeObjectDesc privSubjectDesc, String grantor,
-      PrincipalType grantorType, boolean grantOption, boolean isGrant) throws HiveException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+    HiveAuthorizer authorizer = getSessionAuthorizer();
 
     //Convert to object types used by the authorization plugin interface
-    List<HivePrincipal> hivePrincipals = getHivePrincipals(principals);
-    List<HivePrivilege> hivePrivileges = getHivePrivileges(privileges);
-    HivePrivilegeObject hivePrivObject = getHivePrivilegeObject(privSubjectDesc);
-    HivePrincipal grantorPrincipal = new HivePrincipal(grantor, getHivePrincipalType(grantorType));
+    List<HivePrincipal> hivePrincipals = AuthorizationUtils.getHivePrincipals(principals);
+    List<HivePrivilege> hivePrivileges = AuthorizationUtils.getHivePrivileges(privileges);
+    HivePrivilegeObject hivePrivObject = AuthorizationUtils.getHivePrivilegeObject(privSubjectDesc, null);
+
+    HivePrincipal grantorPrincipal = new HivePrincipal(
+        grantor, AuthorizationUtils.getHivePrincipalType(grantorType));
 
     if(isGrant){
       authorizer.grantPrivileges(hivePrincipals, hivePrivileges, hivePrivObject,
@@ -875,123 +636,8 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }
 
-  private HivePrivilegeObject getHivePrivilegeObject(PrivilegeObjectDesc privSubjectDesc)
-      throws HiveException {
-    if(privSubjectDesc == null){
-      return new HivePrivilegeObject(null, null, null);
-    }
-    String [] dbTable = Utilities.getDbTableName(privSubjectDesc.getObject());
-    return new HivePrivilegeObject(getPrivObjectType(privSubjectDesc), dbTable[0], dbTable[1]);
-  }
-
-  private HivePrincipalType getHivePrincipalType(PrincipalType type) throws HiveException {
-    if(type == null){
-      return null;
-    }
-
-    switch(type){
-    case USER:
-      return HivePrincipalType.USER;
-    case ROLE:
-      return HivePrincipalType.ROLE;
-    case GROUP:
-      throw new HiveException(ErrorMsg.UNNSUPPORTED_AUTHORIZATION_PRINCIPAL_TYPE_GROUP);
-    default:
-      //should not happen as we take care of all existing types
-      throw new AssertionError("Unsupported authorization type specified");
-    }
-  }
-
-  private HivePrivilegeObjectType getPrivObjectType(PrivilegeObjectDesc privSubjectDesc) {
-    if (privSubjectDesc.getObject() == null) {
-      return null;
-    }
-    return privSubjectDesc.getTable() ? HivePrivilegeObjectType.TABLE_OR_VIEW : HivePrivilegeObjectType.DATABASE;
-  }
-
-  private List<HivePrivilege> getHivePrivileges(List<PrivilegeDesc> privileges) {
-    List<HivePrivilege> hivePrivileges = new ArrayList<HivePrivilege>();
-    for(PrivilegeDesc privilege : privileges){
-      hivePrivileges.add(
-          new HivePrivilege(privilege.getPrivilege().toString(), privilege.getColumns()));
-    }
-    return hivePrivileges;
-  }
-
-  private List<HivePrincipal> getHivePrincipals(List<PrincipalDesc> principals) throws HiveException {
-    ArrayList<HivePrincipal> hivePrincipals = new ArrayList<HivePrincipal>();
-    for(PrincipalDesc principal : principals){
-      hivePrincipals.add(getHivePrincipal(principal));
-    }
-    return hivePrincipals;
-  }
-
-  private HivePrincipal getHivePrincipal(PrincipalDesc principal) throws HiveException {
-    if (principal == null) {
-      return null;
-    }
-    return new HivePrincipal(principal.getName(),
-        AuthorizationUtils.getHivePrincipalType(principal.getType()));
-  }
-
-  private void throwNotFound(String objType, String objName) throws HiveException {
-    throw new HiveException(objType + " " + objName + " not found");
-  }
-
-  private int roleDDL(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    if(SessionState.get().isAuthorizationModeV2()){
-      return roleDDLV2(roleDDLDesc);
-    }
-
-    DataOutputStream outStream = null;
-    RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
-    try {
-      if (operation.equals(RoleDDLDesc.RoleOperation.CREATE_ROLE)) {
-        db.createRole(roleDDLDesc.getName(), roleDDLDesc.getRoleOwnerName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.DROP_ROLE)) {
-        db.dropRole(roleDDLDesc.getName());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT)) {
-        boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
-        List<RolePrincipalGrant> roleGrants = db.getRoleGrantInfoForPrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType());
-        writeToFile(writeRoleGrantsInfo(roleGrants, testMode), roleDDLDesc.getResFile());
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLES)) {
-        List<String> roleNames = db.getAllRoleNames();
-        //sort the list to get sorted (deterministic) output (for ease of testing)
-        Collections.sort(roleNames);
-        Path resFile = new Path(roleDDLDesc.getResFile());
-        FileSystem fs = resFile.getFileSystem(conf);
-        outStream = fs.create(resFile);
-        for (String roleName : roleNames) {
-          outStream.writeBytes(roleName);
-          outStream.write(terminator);
-        }
-        outStream.close();
-        outStream = null;
-      } else if (operation.equals(RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS)) {
-        throw new HiveException("Show role principals is not currently supported in "
-            + "authorization mode V1");
-      }
-      else {
-        throw new HiveException("Unkown role operation "
-            + operation.getOperationName());
-      }
-    } catch (HiveException e) {
-      console.printError("Error in role operation "
-          + operation.getOperationName() + " on role name "
-          + roleDDLDesc.getName() + ", error message " + e.getMessage());
-      return 1;
-    } catch (IOException e) {
-      LOG.info("role ddl exception: " + stringifyException(e));
-      return 1;
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-
-    return 0;
-  }
-
-  private int roleDDLV2(RoleDDLDesc roleDDLDesc) throws HiveException, IOException {
-    HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+  private int roleDDL(RoleDDLDesc roleDDLDesc) throws Exception {
+    HiveAuthorizer authorizer = getSessionAuthorizer();
     RoleDDLDesc.RoleOperation operation = roleDDLDesc.getOperation();
     //call the appropriate hive authorizer function
     switch(operation){
@@ -1004,7 +650,7 @@ public class DDLTask extends Task<DDLWor
     case SHOW_ROLE_GRANT:
       boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVE_IN_TEST);
       List<HiveRoleGrant> roles = authorizer.getRoleGrantInfoForPrincipal(
-          new HivePrincipal(roleDDLDesc.getName(), getHivePrincipalType(roleDDLDesc.getPrincipalType())));
+          AuthorizationUtils.getHivePrincipal(roleDDLDesc.getName(), roleDDLDesc.getPrincipalType()));
       writeToFile(writeRolesGrantedInfo(roles, testMode), roleDDLDesc.getResFile());
       break;
     case SHOW_ROLES:
@@ -2193,6 +1839,7 @@ public class DDLTask extends Task<DDLWor
   private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws HiveException {
     // get the create table statement for the table and populate the output
     final String EXTERNAL = "external";
+    final String TEMPORARY = "temporary";
     final String LIST_COLUMNS = "columns";
     final String TBL_COMMENT = "tbl_comment";
     final String LIST_PARTITIONS = "partitions";
@@ -2224,7 +1871,7 @@ public class DDLTask extends Task<DDLWor
         return 0;
       }
 
-      createTab_str.append("CREATE <" + EXTERNAL + "> TABLE `");
+      createTab_str.append("CREATE <" + TEMPORARY + "><" + EXTERNAL + ">TABLE 
`");
       createTab_str.append(tableName + "`(\n");
       createTab_str.append("<" + LIST_COLUMNS + ">)\n");
       createTab_str.append("<" + TBL_COMMENT + ">\n");
@@ -2239,11 +1886,17 @@ public class DDLTask extends Task<DDLWor
       createTab_str.append("<" + TBL_PROPERTIES + ">)\n");
       ST createTab_stmt = new ST(createTab_str.toString());
 
+      // For cases where the table is temporary
+      String tbl_temp = "";
+      if (tbl.isTemporary()) {
+        duplicateProps.add("TEMPORARY");
+        tbl_temp = "TEMPORARY ";
+      }
       // For cases where the table is external
       String tbl_external = "";
       if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
         duplicateProps.add("EXTERNAL");
-        tbl_external = "EXTERNAL";
+        tbl_external = "EXTERNAL ";
       }
 
       // Columns
@@ -2399,6 +2052,7 @@ public class DDLTask extends Task<DDLWor
         tbl_properties += StringUtils.join(realProps, ", \n");
       }
 
+      createTab_stmt.add(TEMPORARY, tbl_temp);
       createTab_stmt.add(EXTERNAL, tbl_external);
       createTab_stmt.add(LIST_COLUMNS, tbl_columns);
       createTab_stmt.add(TBL_COMMENT, tbl_comment);
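
The <TEMPORARY> placeholder introduced above is filled via StringTemplate
attribute substitution, which is why tbl_temp and tbl_external now carry
their own trailing space. A self-contained sketch of the same pattern (the
table name and attribute values are illustrative):

    import org.stringtemplate.v4.ST;

    public class ShowCreateTemplateSketch {
      public static void main(String[] args) {
        // Empty attributes render as nothing, so a temporary, non-external
        // table yields "CREATE TEMPORARY TABLE `t`".
        ST stmt = new ST("CREATE <temporary><external>TABLE `t`");
        stmt.add("temporary", "TEMPORARY ");
        stmt.add("external", "");
        System.out.println(stmt.render());
      }
    }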
@@ -2751,7 +2405,7 @@ public class DDLTask extends Task<DDLWor
       LOG.warn("show function: " + stringifyException(e));
       return 1;
     } catch (Exception e) {
-      throw new HiveException(e.toString());
+      throw new HiveException(e.toString(), e);
     } finally {
       IOUtils.closeStream(outStream);
     }
@@ -3164,7 +2818,7 @@ public class DDLTask extends Task<DDLWor
         funcClass = functionInfo.getFunctionClass();
       }
       if (funcClass != null) {
-        desc = funcClass.getAnnotation(Description.class);
+        desc = AnnotationUtils.getAnnotation(funcClass, Description.class);
       }
       if (desc != null) {
         outStream.writeBytes(desc.value().replace("_FUNC_", funcName));
@@ -3440,7 +3094,7 @@ public class DDLTask extends Task<DDLWor
           // when column name is specified in describe table DDL, colPath will
           // will be table_name.column_name
           String colName = colPath.split("\\.")[1];
-          String[] dbTab = splitTableName(tableName);
+          String[] dbTab = Utilities.getDbTableName(tableName);
           List<String> colNames = new ArrayList<String>();
           colNames.add(colName.toLowerCase());
           if (null == part) {
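
Utilities.getDbTableName replaces the splitTableName helper deleted earlier
in this diff; per that removed code, the behavior amounts to resolving an
unqualified name against the session's current database:

    // Behavior sketch reconstructed from the removed splitTableName above;
    // the committed implementation lives in org.apache.hadoop.hive.ql.exec.Utilities.
    static String[] dbTableName(String fullName) {
      String[] dbTab = fullName.split("\\.");
      if (dbTab.length == 2) {
        return dbTab;
      }
      return new String[] { SessionState.get().getCurrentDatabase(), fullName };
    }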
@@ -3490,28 +3144,41 @@ public class DDLTask extends Task<DDLWor
     }
   }
 
-  static String writeGrantInfo(List<HiveObjectPrivilege> privileges, boolean testMode) {
+  static String writeGrantInfo(List<HivePrivilegeInfo> privileges, boolean testMode) {
     if (privileges == null || privileges.isEmpty()) {
       return "";
     }
     StringBuilder builder = new StringBuilder();
     //sort the list to get sorted (deterministic) output (for ease of testing)
-    Collections.sort(privileges);
-
-    for (HiveObjectPrivilege privilege : privileges) {
-      HiveObjectRef resource = privilege.getHiveObject();
-      PrivilegeGrantInfo grantInfo = privilege.getGrantInfo();
+    Collections.sort(privileges, new Comparator<HivePrivilegeInfo>() {
+      @Override
+      public int compare(HivePrivilegeInfo o1, HivePrivilegeInfo o2) {
+        int compare = o1.getObject().compareTo(o2.getObject());
+        if (compare == 0) {
+          compare = o1.getPrincipal().compareTo(o2.getPrincipal());
+        }
+        if (compare == 0) {
+          compare = o1.getPrivilege().compareTo(o2.getPrivilege());
+        }
+        return compare;
+      }
+    });
+
+    for (HivePrivilegeInfo privilege : privileges) {
+      HivePrincipal principal = privilege.getPrincipal();
+      HivePrivilegeObject resource = privilege.getObject();
+      HivePrincipal grantor = privilege.getGrantorPrincipal();
 
-      appendNonNull(builder, resource.getDbName(), true);
+      appendNonNull(builder, resource.getDbname(), true);
       appendNonNull(builder, resource.getObjectName());
-      appendNonNull(builder, resource.getPartValues());
-      appendNonNull(builder, resource.getColumnName());
-      appendNonNull(builder, privilege.getPrincipalName());
-      appendNonNull(builder, privilege.getPrincipalType());
-      appendNonNull(builder, grantInfo.getPrivilege());
-      appendNonNull(builder, grantInfo.isGrantOption());
-      appendNonNull(builder, testMode ? -1 : grantInfo.getCreateTime() * 1000L);
-      appendNonNull(builder, grantInfo.getGrantor());
+      appendNonNull(builder, resource.getPartKeys());
+      appendNonNull(builder, resource.getColumns());
+      appendNonNull(builder, principal.getName());
+      appendNonNull(builder, principal.getType());
+      appendNonNull(builder, privilege.getPrivilege().getName());
+      appendNonNull(builder, privilege.isGrantOption());
+      appendNonNull(builder, testMode ? -1 : privilege.getGrantTime() * 1000L);
+      appendNonNull(builder, grantor.getName());
     }
     return builder.toString();
   }
@@ -3745,8 +3412,7 @@ public class DDLTask extends Task<DDLWor
           tbl.getTTable().getSd().getSerdeInfo().getParameters().putAll(
               alterTbl.getProps());
         }
-        if (!conf.getStringCollection(ConfVars.SERDESUSINGMETASTOREFORSCHEMA.varname)
-            .contains(serdeName)) {
+        if (!Table.hasMetastoreBasedSchema(conf, serdeName)) {
           tbl.setFields(Hive.getFieldsFromDeserializer(tbl.getTableName(), tbl.
               getDeserializer()));
         }
@@ -4246,6 +3912,8 @@ public class DDLTask extends Task<DDLWor
       tbl.setSkewedColValues(crtTbl.getSkewedColValues());
     }
 
+    tbl.getTTable().setTemporary(crtTbl.isTemporary());
+
     tbl.setStoredAsSubDirectories(crtTbl.isStoredAsSubDirectories());
 
     tbl.setInputFormatClass(crtTbl.getInputFormat());
@@ -4393,6 +4061,8 @@ public class DDLTask extends Task<DDLWor
         params.putAll(crtTbl.getTblProps());
       }
 
+      tbl.getTTable().setTemporary(crtTbl.isTemporary());
+
       if (crtTbl.isExternal()) {
         tbl.setProperty("EXTERNAL", "TRUE");
         tbl.setTableType(TableType.EXTERNAL_TABLE);
@@ -4517,24 +4187,49 @@ public class DDLTask extends Task<DDLWor
   }
 
   private List<Path> getLocations(Hive db, Table table, Map<String, String> partSpec)
-      throws HiveException {
+      throws HiveException, InvalidOperationException {
     List<Path> locations = new ArrayList<Path>();
     if (partSpec == null) {
       if (table.isPartitioned()) {
         for (Partition partition : db.getPartitions(table)) {
           locations.add(partition.getDataLocation());
+          if (needToUpdateStats(partition.getParameters())) {
+            db.alterPartition(table.getDbName(), table.getTableName(), partition);
+          }
         }
       } else {
         locations.add(table.getPath());
+        if (needToUpdateStats(table.getParameters())) {
+          db.alterTable(table.getDbName()+"."+table.getTableName(), table);
+        }
       }
     } else {
       for (Partition partition : db.getPartitionsByNames(table, partSpec)) {
         locations.add(partition.getDataLocation());
+        if (needToUpdateStats(partition.getParameters())) {
+          db.alterPartition(table.getDbName(), table.getTableName(), partition);
+        }
       }
     }
     return locations;
   }
 
+  private boolean needToUpdateStats(Map<String,String> props) {
+    if (null == props) {
+      return false;
+    }
+    boolean statsPresent = false;
+    for (String stat : StatsSetupConst.supportedStats) {
+      String statVal = props.get(stat);
+      if (statVal != null && Long.parseLong(statVal) > 0) {
+        statsPresent = true;
+        props.put(stat, "0");
+        props.put(StatsSetupConst.COLUMN_STATS_ACCURATE, "false");
+      }
+    }
+    return statsPresent;
+  }
+
   private String escapeHiveCommand(String str) {
     StringBuilder sb = new StringBuilder();
     for (int i = 0; i < str.length(); i ++) {

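The needToUpdateStats helper added above zeroes any positive basic stat and
marks the stats inaccurate, so callers persist the reset via alterTable or
alterPartition. A standalone sketch of the logic with plain maps (the stat
names are illustrative stand-ins for StatsSetupConst.supportedStats):

    import java.util.HashMap;
    import java.util.Map;

    public class StatsResetSketch {
      // Illustrative stand-ins for StatsSetupConst.supportedStats.
      static final String[] SUPPORTED_STATS = { "numRows", "totalSize" };

      static boolean needToUpdateStats(Map<String, String> props) {
        if (props == null) {
          return false;
        }
        boolean statsPresent = false;
        for (String stat : SUPPORTED_STATS) {
          String statVal = props.get(stat);
          if (statVal != null && Long.parseLong(statVal) > 0) {
            statsPresent = true;
            props.put(stat, "0");                          // reset the stat itself
            props.put("COLUMN_STATS_ACCURATE", "false");   // flag stats as stale
          }
        }
        return statsPresent;
      }

      public static void main(String[] args) {
        Map<String, String> props = new HashMap<String, String>();
        props.put("numRows", "42");
        // Prints true; numRows is reset to 0 and COLUMN_STATS_ACCURATE=false
        // (map iteration order may vary).
        System.out.println(needToUpdateStats(props) + " " + props);
      }
    }
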
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java 
(original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java 
Sat Jul 26 23:45:46 2014
@@ -32,8 +32,8 @@ import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.List;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -44,21 +44,20 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.metadata.AuthorizationException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.physical.StageIDsRearranger;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
-import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+import org.apache.hadoop.hive.ql.plan.TezWork;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationFactory;
-import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hive.common.util.AnnotationUtils;
 import org.json.JSONArray;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -70,7 +69,7 @@ import org.json.JSONObject;
 public class ExplainTask extends Task<ExplainWork> implements Serializable {
   private static final long serialVersionUID = 1L;
   public static final String EXPL_COLUMN_NAME = "Explain";
-  private Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
+  private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
   private boolean isLogical = false;
 
   public ExplainTask() {
@@ -144,7 +143,9 @@ public class ExplainTask extends Task<Ex
     }
 
     if (work.getParseContext() != null) {
-      out.print("LOGICAL PLAN:");
+      if (out != null) {
+        out.print("LOGICAL PLAN:");
+      }
       JSONObject jsonPlan = outputMap(work.getParseContext().getTopOps(), true,
                                       out, jsonOutput, work.getExtended(), 0);
       if (out != null) {
@@ -168,7 +169,7 @@ public class ExplainTask extends Task<Ex
 
   public JSONObject getJSONPlan(PrintStream out, String ast, List<Task<?>> tasks, Task<?> fetchTask,
       boolean jsonOutput, boolean isExtended, boolean appendTaskType) throws Exception {
-    
+
     // If the user asked for a formatted output, dump the json output
     // in the output stream
     JSONObject outJSONObject = new JSONObject();
@@ -334,23 +335,25 @@ public class ExplainTask extends Task<Ex
     if (analyzer.skipAuthorization()) {
       return object;
     }
-    HiveAuthorizationProvider delegate = SessionState.get().getAuthorizer();
 
     final List<String> exceptions = new ArrayList<String>();
-    HiveAuthorizationProvider authorizer = AuthorizationFactory.create(delegate,
-        new AuthorizationFactory.AuthorizationExceptionHandler() {
-          public void exception(AuthorizationException exception) {
-            exceptions.add(exception.getMessage());
-          }
-        });
-
-    SessionState.get().setAuthorizer(authorizer);
-    try {
-      Driver.doAuthorization(analyzer);
-    } finally {
-      SessionState.get().setAuthorizer(delegate);
+    Object delegate = SessionState.get().getActiveAuthorizer();
+    if (delegate != null) {
+      Class itface = SessionState.get().getAuthorizerInterface();
+      Object authorizer = AuthorizationFactory.create(delegate, itface,
+          new AuthorizationFactory.AuthorizationExceptionHandler() {
+            public void exception(Exception exception) {
+              exceptions.add(exception.getMessage());
+            }
+          });
+
+      SessionState.get().setActiveAuthorizer(authorizer);
+      try {
+        Driver.doAuthorization(analyzer, "");
+      } finally {
+        SessionState.get().setActiveAuthorizer(delegate);
+      }
     }
-
     if (!exceptions.isEmpty()) {
       Object jsonFails = toJson("AUTHORIZATION_FAILURES", exceptions, out, work);
       if (work.isFormatted()) {
@@ -396,7 +399,7 @@ public class ExplainTask extends Task<Ex
         }
       }
       else if (ent.getValue() instanceof List) {
-        if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty() 
+        if (ent.getValue() != null && !((List<?>)ent.getValue()).isEmpty()
             && ((List<?>)ent.getValue()).get(0) != null &&
             ((List<?>)ent.getValue()).get(0) instanceof TezWork.Dependency) {
           if (out != null) {
@@ -525,7 +528,7 @@ public class ExplainTask extends Task<Ex
   private JSONObject outputPlan(Serializable work, PrintStream out,
       boolean extended, boolean jsonOutput, int indent, String appendToHeader) throws Exception {
     // Check if work has an explain annotation
-    Annotation note = work.getClass().getAnnotation(Explain.class);
+    Annotation note = AnnotationUtils.getAnnotation(work.getClass(), Explain.class);
 
     String keyJSONObject = null;
 
@@ -584,7 +587,7 @@ public class ExplainTask extends Task<Ex
 
     for (Method m : methods) {
       int prop_indents = jsonOutput ? 0 : indent + 2;
-      note = m.getAnnotation(Explain.class);
+      note = AnnotationUtils.getAnnotation(m, Explain.class);
 
       if (note instanceof Explain) {
         Explain xpl_note = (Explain) note;
@@ -905,6 +908,7 @@ public class ExplainTask extends Task<Ex
    *
    */
   public class MethodComparator implements Comparator<Method> {
+    @Override
     public int compare(Method m1, Method m2) {
       return m1.getName().compareTo(m2.getName());
     }

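AuthorizationFactory.create now takes the authorizer's interface class, which
points at a dynamic-proxy wrapper that turns authorization failures into
collected messages instead of aborting EXPLAIN. A minimal sketch of that idea
with java.lang.reflect.Proxy (the committed factory may differ in detail):

    import java.lang.reflect.InvocationHandler;
    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;
    import java.lang.reflect.Proxy;
    import java.util.List;

    public class CollectingAuthorizerSketch {
      // Wraps delegate so calls that throw are recorded in errors and return null.
      @SuppressWarnings("unchecked")
      static <T> T create(final Object delegate, Class<T> itface, final List<String> errors) {
        return (T) Proxy.newProxyInstance(itface.getClassLoader(),
            new Class<?>[] { itface }, new InvocationHandler() {
              @Override
              public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
                try {
                  return method.invoke(delegate, args);
                } catch (InvocationTargetException e) {
                  errors.add(e.getCause().getMessage());
                  return null;
                }
              }
            });
      }
    }
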
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
 Sat Jul 26 23:45:46 2014
@@ -104,4 +104,9 @@ public abstract class ExprNodeEvaluator<
   public ExprNodeEvaluator[] getChildren() {
     return null;
   }
+  
+  @Override
+  public String toString() {
+    return "ExprNodeEvaluator[" + expr + "]";
+  }
 }

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionInfo.java
 Sat Jul 26 23:45:46 2014
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.ql.udf.gen
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction;
+import org.apache.hive.common.util.AnnotationUtils;
 
 /**
  * FunctionInfo.
@@ -74,7 +75,8 @@ public class FunctionInfo implements Com
   {
     this.displayName = displayName;
     this.tableFunctionResolver = tFnCls;
-    PartitionTableFunctionDescription def = tableFunctionResolver.getAnnotation(PartitionTableFunctionDescription.class);
+    PartitionTableFunctionDescription def = AnnotationUtils.getAnnotation(
+        tableFunctionResolver, PartitionTableFunctionDescription.class);
     this.isNative = (def == null) ? false : def.isInternal();
     this.isInternalTableFunction = isNative;
   }
@@ -136,7 +138,7 @@ public class FunctionInfo implements Com
   }
 
   /**
-   * Get the display name for this function. This should be transfered into
+   * Get the display name for this function. This should be transferred into
    * exprNodeGenericUDFDesc, and will be used as the first parameter to
    * GenericUDF.getDisplayName() call, instead of hard-coding the function name.
    * This will solve the problem of displaying only one name when a udf is

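Several hunks in this commit (DDLTask, ExplainTask, FunctionInfo) replace
direct getAnnotation calls with org.apache.hive.common.util.AnnotationUtils.
A plausible shape for such a utility is a synchronized pass-through that
serializes annotation parsing; this is a sketch, not the committed source:

    import java.lang.annotation.Annotation;
    import java.lang.reflect.Method;

    public final class AnnotationUtilsSketch {
      private AnnotationUtilsSketch() {
      }

      // Funneling lookups through one monitor sidesteps concurrent
      // annotation-parsing races observed in some JDK versions.
      public static synchronized <A extends Annotation> A getAnnotation(
          Class<?> clazz, Class<A> annotation) {
        return clazz.getAnnotation(annotation);
      }

      public static synchronized <A extends Annotation> A getAnnotation(
          Method method, Class<A> annotation) {
        return method.getAnnotation(annotation);
      }
    }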
