Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Sat Jul 26 23:45:46 2014
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.security.authorization.AuthorizationUtils;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilege;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
@@ -185,8 +186,12 @@ public class SQLAuthorizationUtils {
     // get privileges for this user and its role on this object
     PrincipalPrivilegeSet thrifPrivs = null;
     try {
+      HiveObjectRef objectRef = AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject);
+      if (objectRef.getObjectType() == null) {
+        objectRef.setObjectType(HiveObjectType.GLOBAL);
+      }
       thrifPrivs = metastoreClient.get_privilege_set(
-          AuthorizationUtils.getThriftHiveObjectRef(hivePrivObject), userName, null);
+          objectRef, userName, null);
     } catch (MetaException e) {
       throwGetPrivErr(e, hivePrivObject, userName);
     } catch (TException e) {
@@ -259,7 +264,7 @@ public class SQLAuthorizationUtils {
       Table thriftTableObj = null;
       try {
         thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(),
-            hivePrivObject.getTableViewURI());
+            hivePrivObject.getObjectName());
       } catch (Exception e) {
         throwGetObjErr(e, hivePrivObject);
       }
@@ -347,18 +352,15 @@ public class SQLAuthorizationUtils {
     }
   }
 
-  public static void assertNoMissingPrivilege(Collection<SQLPrivTypeGrant> missingPrivs,
-      HivePrincipal hivePrincipal, HivePrivilegeObject hivePrivObject)
-      throws HiveAccessControlException {
+  public static void addMissingPrivMsg(Collection<SQLPrivTypeGrant> missingPrivs,
+      HivePrivilegeObject hivePrivObject, List<String> deniedMessages) {
     if (missingPrivs.size() != 0) {
       // there are some required privileges missing, create error message
       // sort the privileges so that error message is deterministic (for tests)
       List<SQLPrivTypeGrant> sortedmissingPrivs = new ArrayList<SQLPrivTypeGrant>(missingPrivs);
       Collections.sort(sortedmissingPrivs);
-
-      String errMsg = "Permission denied. " + hivePrincipal
-          + " does not have following privileges on " + hivePrivObject + " : " + sortedmissingPrivs;
-      throw new HiveAccessControlException(errMsg.toString());
+      String errMsg = sortedmissingPrivs + " on " + hivePrivObject;
+      deniedMessages.add(errMsg);
     }
   }
 
@@ -399,5 +401,16 @@ public class SQLAuthorizationUtils {
     return availPrivs;
   }
 
+  public static void assertNoDeniedPermissions(HivePrincipal hivePrincipal,
+      HiveOperationType hiveOpType, List<String> deniedMessages) throws HiveAccessControlException {
+    if (deniedMessages.size() != 0) {
+      Collections.sort(deniedMessages);
+      String errorMessage = "Permission denied: " + hivePrincipal
+          + " does not have following privileges for operation " + hiveOpType + " "
+          + deniedMessages;
+      throw new HiveAccessControlException(errorMessage);
+    }
+  }
+
 
 }
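The two hunks above convert authorization failures from fail-fast exceptions into accumulated messages: addMissingPrivMsg() records one entry per object (sorted so test output stays deterministic), and assertNoDeniedPermissions() throws a single HiveAccessControlException for the whole operation. A minimal sketch of the resulting call pattern; the driver loop and variable names are assumed for illustration:

    // Illustrative only: the collect-then-assert flow introduced above.
    List<String> deniedMessages = new ArrayList<String>();
    for (HivePrivilegeObject hiveObj : objectsToCheck) {  // hypothetical object list
      Collection<SQLPrivTypeGrant> missing = requiredPrivs.findMissingPrivs(availPrivs);
      SQLAuthorizationUtils.addMissingPrivMsg(missing, hiveObj, deniedMessages);
    }
    // One exception per operation instead of one per object:
    SQLAuthorizationUtils.assertNoDeniedPermissions(
        new HivePrincipal(userName, HivePrincipalType.USER), hiveOpType, deniedMessages);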

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Sat Jul 26 23:45:46 2014
@@ -79,7 +79,7 @@ public class SQLStdHiveAccessController 
   private HiveRoleGrant adminRole;
   private final String ADMIN_ONLY_MSG = "User has to belong to ADMIN role and "
       + "have it as current role, for this action.";
-  private final String HAS_ADMIN_PRIV_MSG = "grantor need to have ADMIN privileges on role being"
+  private final String HAS_ADMIN_PRIV_MSG = "grantor need to have ADMIN OPTION on role being"
       + " granted and have it as a current role for this action.";
   public static final Log LOG = LogFactory.getLog(SQLStdHiveAccessController.class);
 
@@ -236,7 +236,7 @@ public class SQLStdHiveAccessController 
       // So this will revoke privileges that are granted by other users.This is
       // not SQL compliant behavior. Need to change/add a metastore api
       // that has desired behavior.
-      metastoreClient.revoke_privileges(new PrivilegeBag(revokePrivs));
+      metastoreClient.revoke_privileges(new PrivilegeBag(revokePrivs), grantOption);
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error revoking privileges", e);
     }
@@ -308,11 +308,6 @@ public class SQLStdHiveAccessController 
   public void revokeRole(List<HivePrincipal> hivePrincipals, List<String> roleNames,
     boolean grantOption, HivePrincipal grantorPrinc) throws HiveAuthzPluginException,
     HiveAccessControlException {
-    if (grantOption) {
-      // removing grant privileges only is not supported in metastore api
-      throw new HiveAuthzPluginException("Revoking only the admin privileges 
on "
-        + "role is not currently supported");
-    }
     if (!(isUserAdmin() || doesUserHasAdminOption(roleNames))) {
       throw new HiveAccessControlException("Current user : " + currentUserName+ " is not"
           + " allowed to revoke role. " + ADMIN_ONLY_MSG + " Otherwise, " + HAS_ADMIN_PRIV_MSG);
@@ -322,7 +317,7 @@ public class SQLStdHiveAccessController 
         try {
           IMetaStoreClient mClient = metastoreClientFactory.getHiveMetastoreClient();
           mClient.revoke_role(roleName, hivePrincipal.getName(),
-              AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()));
+              AuthorizationUtils.getThriftPrincipalType(hivePrincipal.getType()), grantOption);
         } catch (Exception e) {
           String msg = "Error revoking roles for " + hivePrincipal.getName() + 
" to role "
               + roleName + ": " + e.getMessage();
@@ -355,19 +350,24 @@ public class SQLStdHiveAccessController 
         + " allowed get principals in a role. " + ADMIN_ONLY_MSG);
     }
     try {
-      GetPrincipalsInRoleResponse princGrantInfo =
-          metastoreClientFactory.getHiveMetastoreClient().get_principals_in_role(new GetPrincipalsInRoleRequest(roleName));
-
-      List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
-      for(RolePrincipalGrant thriftRoleGrant :  princGrantInfo.getPrincipalGrants()){
-        hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
-      }
-      return hiveRoleGrants;
+      return getHiveRoleGrants(metastoreClientFactory.getHiveMetastoreClient(), roleName);
     } catch (Exception e) {
       throw new HiveAuthzPluginException("Error getting principals for all roles", e);
     }
   }
 
+  public static List<HiveRoleGrant> getHiveRoleGrants(IMetaStoreClient client, String roleName)
+      throws Exception {
+    GetPrincipalsInRoleRequest request = new GetPrincipalsInRoleRequest(roleName);
+    GetPrincipalsInRoleResponse princGrantInfo = client.get_principals_in_role(request);
+
+    List<HiveRoleGrant> hiveRoleGrants = new ArrayList<HiveRoleGrant>();
+    for(RolePrincipalGrant thriftRoleGrant :  princGrantInfo.getPrincipalGrants()){
+      hiveRoleGrants.add(new HiveRoleGrant(thriftRoleGrant));
+    }
+    return hiveRoleGrants;
+  }
+
   @Override
   public List<HivePrivilegeInfo> showPrivileges(HivePrincipal principal, HivePrivilegeObject privObj)
       throws HiveAuthzPluginException {
@@ -420,8 +420,8 @@ public class SQLStdHiveAccessController 
         }
 
         HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
-            getPluginObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
-            msObjRef.getObjectName());
+            getPluginPrivilegeObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
+            msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
 
         // result grantor principal
         HivePrincipal grantorPrincipal = new HivePrincipal(msGrantInfo.getGrantor(),
@@ -479,8 +479,14 @@ public class SQLStdHiveAccessController 
     return false;
   }
 
-  private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
-      throws HiveAuthzPluginException {
+  /**
+   * Convert metastore object type to HivePrivilegeObjectType.
+   * Also verifies that metastore object type is of a type on which metastore privileges are
+   * supported by sql std auth.
+   * @param objectType
+   * @return corresponding HivePrivilegeObjectType
+   */
+  private HivePrivilegeObjectType getPluginPrivilegeObjType(HiveObjectType objectType) {
     switch (objectType) {
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;
@@ -682,9 +688,6 @@ public class SQLStdHiveAccessController 
     LOG.debug("Configuring hooks : " + hooks);
     hiveConf.setVar(ConfVars.PREEXECHOOKS, hooks);
 
-    // set security command list to only allow set command
-    hiveConf.setVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST, "set");
-
     // restrict the variables that can be set using set command to a list in whitelist
     hiveConf.setIsModWhiteListEnabled(true);
     String whiteListParamsStr = hiveConf.getVar(ConfVars.HIVE_AUTHORIZATION_SQL_STD_AUTH_CONFIG_WHITELIST);
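Extracting getHiveRoleGrants() as a static helper makes the thrift request/response round trip reusable by other callers that already hold an IMetaStoreClient. A sketch of such a caller; the role name and the printed accessor are assumptions for illustration:

    // Illustrative only: reusing the new static helper outside this class.
    IMetaStoreClient client = metastoreClientFactory.getHiveMetastoreClient();
    List<HiveRoleGrant> grants = SQLStdHiveAccessController.getHiveRoleGrants(client, "admin");
    for (HiveRoleGrant grant : grants) {
      // assumes HiveRoleGrant exposes the wrapped principal name
      System.out.println(grant.getPrincipalName());
    }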

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Sat Jul 26 23:45:46 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
@@ -28,13 +29,13 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizationValidator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrincipal.HivePrincipalType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
-import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.Operation2Privilege.IOType;
 
 public class SQLStdHiveAuthorizationValidator implements HiveAuthorizationValidator {
@@ -57,13 +58,13 @@ public class SQLStdHiveAuthorizationVali
 
   @Override
   public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
-      List<HivePrivilegeObject> outputHObjs) throws HiveAuthzPluginException,
-      HiveAccessControlException {
+      List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+      throws HiveAuthzPluginException, HiveAccessControlException {
 
     if (LOG.isDebugEnabled()) {
       String msg = "Checking privileges for operation " + hiveOpType + " by 
user "
           + authenticator.getUserName() + " on " + " input objects " + 
inputHObjs
-          + " and output objects " + outputHObjs;
+          + " and output objects " + outputHObjs + ". Context Info: " + 
context;
       LOG.debug(msg);
     }
 
@@ -71,15 +72,22 @@ public class SQLStdHiveAuthorizationVali
     IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
 
     // check privileges on input and output objects
-    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, IOType.INPUT);
-    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, IOType.OUTPUT);
+    List<String> deniedMessages = new ArrayList<String>();
+    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, IOType.INPUT, deniedMessages);
+    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, IOType.OUTPUT, deniedMessages);
 
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), hiveOpType, deniedMessages);
   }
 
   private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> hiveObjects,
-      IMetaStoreClient metastoreClient, String userName, IOType ioType)
+      IMetaStoreClient metastoreClient, String userName, IOType ioType, List<String> deniedMessages)
       throws HiveAuthzPluginException, HiveAccessControlException {
 
+    if (hiveObjects == null) {
+      return;
+    }
+
     // Compare required privileges and available privileges for each hive object
     for (HivePrivilegeObject hiveObj : hiveObjects) {
 
@@ -87,26 +95,34 @@ public class SQLStdHiveAuthorizationVali
           ioType);
 
       // find available privileges
-      RequiredPrivileges availPrivs;
-      if (hiveObj.getType() == HivePrivilegeObjectType.LOCAL_URI
-          || hiveObj.getType() == HivePrivilegeObjectType.DFS_URI) {
-        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new Path(hiveObj.getTableViewURI()),
+      RequiredPrivileges availPrivs = new RequiredPrivileges(); //start with an empty priv set;
+      switch (hiveObj.getType()) {
+      case LOCAL_URI:
+      case DFS_URI:
+        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new Path(hiveObj.getObjectName()),
             conf, userName);
-      } else if (hiveObj.getType() == HivePrivilegeObjectType.PARTITION) {
+        break;
+      case PARTITION:
         // sql std authorization is managing privileges at the table/view levels
         // only
         // ignore partitions
         continue;
-      } else {
-        // get the privileges that this user has on the object
+      case COMMAND_PARAMS:
+      case FUNCTION:
+        // operations that have objects of type COMMAND_PARAMS, FUNCTION are authorized
+        // solely on the type
+        if (privController.isUserAdmin()) {
+          availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
+        }
+        break;
+      default:
         availPrivs = SQLAuthorizationUtils.getPrivilegesFromMetaStore(metastoreClient, userName,
             hiveObj, privController.getCurrentRoleNames(), privController.isUserAdmin());
       }
 
       // Verify that there are no missing privileges
       Collection<SQLPrivTypeGrant> missingPriv = requiredPrivs.findMissingPrivs(availPrivs);
-      SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new HivePrincipal(userName,
-          HivePrincipalType.USER), hiveObj);
+      SQLAuthorizationUtils.addMissingPrivMsg(missingPriv, hiveObj, deniedMessages);
 
     }
   }
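The early return on a null object list matters because the two call sites above pass inputHObjs and outputHObjs straight through, and some operations legitimately supply only one side. A sketch of a now-valid invocation; the operation and object values are assumed for illustration:

    // Illustrative only: output-only operations may pass null for the input side.
    validator.checkPrivileges(HiveOperationType.CREATEDATABASE,
        null,           // no input objects for this operation
        outputObjects,  // e.g. the database being created
        context);       // the HiveAuthzContext parameter added in this change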

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Sat Jul 26 23:45:46 2014
@@ -41,6 +41,7 @@ import org.apache.hadoop.conf.Configurat
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -55,6 +56,7 @@ import org.apache.hadoop.hive.ql.log.Per
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
@@ -65,6 +67,8 @@ import org.apache.hadoop.hive.ql.util.Do
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.util.ReflectionUtils;
 
+import com.google.common.base.Preconditions;
+
 /**
  * SessionState encapsulates common data associated with a session.
  *
@@ -75,6 +79,12 @@ import org.apache.hadoop.util.Reflection
 public class SessionState {
   private static final Log LOG = LogFactory.getLog(SessionState.class);
 
+  private static final String TMP_PREFIX = "_tmp_space.db";
+  private static final String LOCAL_SESSION_PATH_KEY = "_hive.local.session.path";
+  private static final String HDFS_SESSION_PATH_KEY = "_hive.hdfs.session.path";
+  private static final String TMP_TABLE_SPACE_KEY = "_hive.tmp_table_space";
+  private final Map<String, Map<String, Table>> tempTables = new HashMap<String, Map<String, Table>>();
+
   protected ClassLoader parentLoader;
 
   /**
@@ -163,6 +173,8 @@ public class SessionState {
   private final String CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER =
       "hive.internal.ss.authz.settings.applied.marker";
 
+  private String userIpAddress;
+
   /**
    * Lineage state.
    */
@@ -173,6 +185,24 @@ public class SessionState {
   private final String userName;
 
   /**
+   *  scratch path to use for all non-local (ie. hdfs) file system tmp folders
+   *  @return Path for Scratch path for the current session
+   */
+  private Path hdfsSessionPath;
+
+  /**
+   * sub dir of hdfs session path. used to keep tmp tables
+   * @return Path for temporary tables created by the current session
+   */
+  private Path hdfsTmpTableSpace;
+
+  /**
+   *  scratch directory to use for local file system tmp folders
+   *  @return Path for local scratch directory for current session
+   */
+  private Path localSessionPath;
+
+  /**
    * Get the lineage state stored in this session.
    *
    * @return LineageState
@@ -335,6 +365,7 @@ public class SessionState {
       Hive.get(new HiveConf(startSs.conf)).getMSC();
       ShimLoader.getHadoopShims().getUGIForConf(startSs.conf);
       FileSystem.get(startSs.conf);
+      startSs.createSessionPaths(startSs.conf);
     } catch (Exception e) {
       // catch-all due to some exec time dependencies on session state
       // that would cause ClassNoFoundException otherwise
@@ -359,6 +390,95 @@ public class SessionState {
     return startSs;
   }
 
+  public static Path getLocalSessionPath(Configuration conf) {
+    SessionState ss = SessionState.get();
+    if (ss == null) {
+      String localPathString = conf.get(LOCAL_SESSION_PATH_KEY);
+      Preconditions.checkNotNull(localPathString,
+          "Conf local session path expected to be non-null");
+      return new Path(localPathString);
+    }
+    Preconditions.checkNotNull(ss.localSessionPath,
+        "Local session path expected to be non-null");
+    return ss.localSessionPath;
+  }
+
+  public static Path getHDFSSessionPath(Configuration conf) {
+    SessionState ss = SessionState.get();
+    if (ss == null) {
+      String sessionPathString = conf.get(HDFS_SESSION_PATH_KEY);
+      Preconditions.checkNotNull(sessionPathString,
+          "Conf non-local session path expected to be non-null");
+      return new Path(sessionPathString);
+    }
+    Preconditions.checkNotNull(ss.hdfsSessionPath,
+        "Non-local session path expected to be non-null");
+    return ss.hdfsSessionPath;
+  }
+
+  public static Path getTempTableSpace(Configuration conf) {
+    SessionState ss = SessionState.get();
+    if (ss == null) {
+      String tempTablePathString = conf.get(TMP_TABLE_SPACE_KEY);
+      Preconditions.checkNotNull(tempTablePathString,
+          "Conf temp table path expected to be non-null");
+      return new Path(tempTablePathString);
+    }
+    return ss.getTempTableSpace();
+  }
+
+  public Path getTempTableSpace() {
+    Preconditions.checkNotNull(this.hdfsTmpTableSpace,
+        "Temp table path expected to be non-null");
+    return this.hdfsTmpTableSpace;
+  }
+
+  private void dropSessionPaths(Configuration conf) throws IOException {
+    if (hdfsSessionPath != null) {
+      hdfsSessionPath.getFileSystem(conf).delete(hdfsSessionPath, true);
+    }
+    if (localSessionPath != null) {
+      localSessionPath.getFileSystem(conf).delete(localSessionPath, true);
+    }
+  }
+
+  private void createSessionPaths(Configuration conf) throws IOException {
+
+    String scratchDirPermission = HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIRPERMISSION);
+    String sessionId = getSessionId();
+
+    // local & non-local tmp location is configurable. however it is the same 
across
+    // all external file systems
+    hdfsSessionPath =
+      new Path(HiveConf.getVar(conf, HiveConf.ConfVars.SCRATCHDIR),
+               sessionId);
+    createPath(conf, hdfsSessionPath, scratchDirPermission);
+    conf.set(HDFS_SESSION_PATH_KEY, hdfsSessionPath.toUri().toString());
+
+    localSessionPath = new Path(HiveConf.getVar(conf, HiveConf.ConfVars.LOCALSCRATCHDIR),
+                                sessionId);
+    createPath(conf, localSessionPath, scratchDirPermission);
+    conf.set(LOCAL_SESSION_PATH_KEY, localSessionPath.toUri().toString());
+    hdfsTmpTableSpace = new Path(hdfsSessionPath, TMP_PREFIX);
+    createPath(conf, hdfsTmpTableSpace, scratchDirPermission);
+    conf.set(TMP_TABLE_SPACE_KEY, hdfsTmpTableSpace.toUri().toString());
+  }
+
+  private void createPath(Configuration conf, Path p, String perm) throws IOException {
+    FileSystem fs = p.getFileSystem(conf);
+    p = new Path(fs.makeQualified(p).toString());
+    FsPermission fsPermission = new FsPermission(Short.parseShort(perm.trim(), 8));
+
+    if (!Utilities.createDirsWithPermission(conf, p, fsPermission)) {
+      throw new IOException("Cannot create directory: "
+                            + p.toString());
+    }
+
+    // best effort to clean up if we don't shut down properly
+    fs.deleteOnExit(p);
+  }
+
+
   /**
    * Setup authentication and authorization plugins for this session.
    */
@@ -397,13 +517,32 @@ public class SessionState {
     }
 
     if(LOG.isDebugEnabled()){
-      Object authorizationClass = getAuthorizationMode() == AuthorizationMode.V1 ?
-          getAuthorizer() : getAuthorizerV2();
-          LOG.debug("Session is using authorization class " + authorizationClass.getClass());
+      Object authorizationClass = getActiveAuthorizer();
+      LOG.debug("Session is using authorization class " + authorizationClass.getClass());
     }
     return;
   }
 
+  public Object getActiveAuthorizer() {
+    return getAuthorizationMode() == AuthorizationMode.V1 ?
+        getAuthorizer() : getAuthorizerV2();
+  }
+
+  public Class getAuthorizerInterface() {
+    return getAuthorizationMode() == AuthorizationMode.V1 ?
+        HiveAuthorizationProvider.class : HiveAuthorizer.class;
+  }
+
+  public void setActiveAuthorizer(Object authorizer) {
+    if (authorizer instanceof HiveAuthorizationProvider) {
+      this.authorizer = (HiveAuthorizationProvider)authorizer;
+    } else if (authorizer instanceof HiveAuthorizer) {
+      this.authorizerV2 = (HiveAuthorizer) authorizer;
+    } else if (authorizer != null) {
+      throw new IllegalArgumentException("Invalid authorizer " + authorizer);
+    }
+  }
+
   /**
    * @param conf
    * @return per-session temp file
@@ -923,6 +1062,8 @@ public class SessionState {
     } finally {
       tezSessionState = null;
     }
+
+    dropSessionPaths(conf);
   }
 
   public AuthorizationMode getAuthorizationMode(){
@@ -992,4 +1133,24 @@ public class SessionState {
     conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString());
 
   }
+
+  public Map<String, Map<String, Table>> getTempTables() {
+    return tempTables;
+  }
+
+  /**
+   * @return ip address for user running the query
+   */
+  public String getUserIpAddress() {
+    return userIpAddress;
+  }
+
+  /**
+   * set the ip address for user running the query
+   * @param userIpAddress
+   */
+  public void setUserIpAddress(String userIpAddress) {
+    this.userIpAddress = userIpAddress;
+  }
+
 }
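The static accessors above are written to work on both sides of a process boundary: inside a live session they read the SessionState fields, while in a spawned process that only carries the Configuration they fall back to the _hive.* keys that createSessionPaths() published into the conf. A short sketch of both situations:

    // Inside a session: resolved from SessionState's own fields.
    Path scratch = SessionState.getHDFSSessionPath(conf);

    // In a child process with no SessionState, the same calls read the
    // "_hive.hdfs.session.path" / "_hive.tmp_table_space" conf keys instead.
    Path tmpTableDir = SessionState.getTempTableSpace(conf);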

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Sat Jul 26 23:45:46 2014
@@ -255,6 +255,10 @@ public class StatsUtils {
     int avgRowSize = 0;
     for (String neededCol : neededColumns) {
       ColumnInfo ci = getColumnInfoForColumn(neededCol, schema);
+      if (ci == null) {
+        // No need to collect statistics of index columns
+        continue;
+      }
       ObjectInspector oi = ci.getObjectInspector();
       String colType = ci.getTypeName();
       if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFComputeStats.java Sat Jul 26 23:45:46 2014
@@ -594,14 +594,14 @@ public class GenericUDAFComputeStats ext
 
       @Override
       protected void updateMin(Object minValue, LongObjectInspector minFieldOI) {
-        if (min == null || (minValue != null && min > minFieldOI.get(minValue))) {
+        if ((minValue != null) && (min == null || (min > minFieldOI.get(minValue)))) {
           min = minFieldOI.get(minValue);
         }
       }
 
       @Override
       protected void updateMax(Object maxValue, LongObjectInspector maxFieldOI) {
-        if (max == null || (maxValue != null && max < maxFieldOI.get(maxValue))) {
+        if ((maxValue != null ) && (max == null || (max < maxFieldOI.get(maxValue)))) {
           max = maxFieldOI.get(maxValue);
         }
       }
@@ -657,14 +657,14 @@ public class GenericUDAFComputeStats ext
 
       @Override
       protected void updateMin(Object minValue, DoubleObjectInspector minFieldOI) {
-        if (min == null || (minValue != null && min > minFieldOI.get(minValue))) {
+        if ((minValue != null) && (min == null || (min > minFieldOI.get(minValue)))) {
           min = minFieldOI.get(minValue);
         }
       }
 
       @Override
       protected void updateMax(Object maxValue, DoubleObjectInspector maxFieldOI) {
-        if (max == null || (maxValue != null && max < maxFieldOI.get(maxValue))) {
+        if ((maxValue != null ) && (max == null || (max < maxFieldOI.get(maxValue)))) {
           max = maxFieldOI.get(maxValue);
         }
       }
@@ -995,7 +995,7 @@ public class GenericUDAFComputeStats ext
       }
 
       if (total != 0) {
-         avgLength = (double)(myagg.sumLength / (1.0 * total));
+         avgLength = myagg.sumLength / (1.0 * total);
       }
 
       // Serialize the result struct
@@ -1241,7 +1241,7 @@ public class GenericUDAFComputeStats ext
       long count = myagg.count + myagg.countNulls;
 
       if (count != 0) {
-        avgLength = (double)(myagg.sumLength / (1.0 * (myagg.count + myagg.countNulls)));
+        avgLength = myagg.sumLength / (1.0 * (myagg.count + myagg.countNulls));
       }
 
       // Serialize the result struct
@@ -1287,7 +1287,7 @@ public class GenericUDAFComputeStats ext
 
       @Override
       protected void updateMin(Object minValue, HiveDecimalObjectInspector minFieldOI) {
-        if (min == null || (minValue != null &&
+        if ((minValue != null) && (min == null ||
             min.compareTo(minFieldOI.getPrimitiveJavaObject(minValue)) > 0)) {
           min = minFieldOI.getPrimitiveJavaObject(minValue);
         }
@@ -1295,7 +1295,7 @@ public class GenericUDAFComputeStats ext
 
       @Override
       protected void updateMax(Object maxValue, HiveDecimalObjectInspector maxFieldOI) {
-        if (max == null || (maxValue != null &&
+        if ((maxValue != null) && (max == null ||
             max.compareTo(maxFieldOI.getPrimitiveJavaObject(maxValue)) < 0)) {
           max = maxFieldOI.getPrimitiveJavaObject(maxValue);
         }
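Each of these hunks applies the same fix: test the candidate value for null before consulting the current min/max. Under the old ordering, min == null short-circuited the condition to true, so a null minValue could reach minFieldOI.get(minValue), for example when merging a partial aggregate computed over all-NULL rows. A reduced illustration, with plain Long standing in for the object-inspector plumbing:

    Long min = null;
    Long minValue = null;  // e.g. a partial aggregate over all-NULL rows

    // Old ordering: min == null is true, so the null candidate is dereferenced.
    // if (min == null || (minValue != null && min > minValue)) { min = minValue; }

    // New ordering: a null candidate is rejected before anything is dereferenced.
    if ((minValue != null) && (min == null || min > minValue)) {
      min = minValue;
    }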

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEvaluator.java Sat Jul 26 23:45:46 2014
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
 import org.apache.hadoop.hive.ql.udf.UDFType;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hive.common.util.AnnotationUtils;
 
 /**
  * A Generic User-defined aggregation function (GenericUDAF) for the use with
@@ -49,7 +50,7 @@ public abstract class GenericUDAFEvaluat
   public static boolean isEstimable(AggregationBuffer buffer) {
     if (buffer instanceof AbstractAggregationBuffer) {
       Class<? extends AggregationBuffer> clazz = buffer.getClass();
-      AggregationType annotation = clazz.getAnnotation(AggregationType.class);
+      AggregationType annotation = AnnotationUtils.getAnnotation(clazz, AggregationType.class);
       return annotation != null && annotation.estimable();
     }
     return false;
@@ -94,7 +95,7 @@ public abstract class GenericUDAFEvaluat
    * Additionally setup GenericUDAFEvaluator with MapredContext before initializing.
    * This is only called in runtime of MapRedTask.
    *
-   * @param context context
+   * @param mapredContext context
    */
   public void configure(MapredContext mapredContext) {
   }
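Routing the lookup through AnnotationUtils avoids a reflective Class.getAnnotation() call on every isEstimable() check, which sits on the hot aggregation path; on the JDKs of this era that call could contend on a lock inside Class, which is the usual motivation for caching it. A minimal sketch of the caching idea (an illustration of the technique, not Hive's actual AnnotationUtils source):

    import java.lang.annotation.Annotation;
    import java.util.concurrent.ConcurrentHashMap;

    final class CachedAnnotations {
      private static final ConcurrentHashMap<String, Annotation> CACHE =
          new ConcurrentHashMap<String, Annotation>();

      static <A extends Annotation> A get(Class<?> clazz, Class<A> type) {
        String key = clazz.getName() + "#" + type.getName();
        Annotation cached = CACHE.get(key);
        if (cached == null) {
          cached = clazz.getAnnotation(type);  // reflective lookup happens once per key
          if (cached != null) {
            CACHE.put(key, cached);
          }
        }
        return type.cast(cached);
      }
    }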

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFFormatNumber.java Sat Jul 26 23:45:46 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf.ge
 
 import java.text.DecimalFormat;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.Description;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
@@ -32,6 +33,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.FloatObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveDecimalObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.io.Text;
 
@@ -78,7 +80,8 @@ public class GenericUDFFormatNumber exte
           + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
           + arguments[0].getTypeName() + "\" was found.");
     }
 
@@ -104,8 +107,9 @@ public class GenericUDFFormatNumber exte
       case SHORT:
       case INT:
       case LONG:
-      case FLOAT:
       case DOUBLE:
+      case FLOAT:
+      case DECIMAL:
         break;
       default:
         throw new UDFArgumentTypeException(0, "Argument 1"
@@ -115,7 +119,8 @@ public class GenericUDFFormatNumber exte
           + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
           + arguments[0].getTypeName() + "\" was found.");
     }
 
@@ -168,6 +173,7 @@ public class GenericUDFFormatNumber exte
 
     double xDoubleValue = 0.0;
     float xFloatValue = 0.0f;
+    HiveDecimal xDecimalValue = null;
     int xIntValue = 0;
     long xLongValue = 0L;
 
@@ -182,6 +188,11 @@ public class GenericUDFFormatNumber exte
         xFloatValue = ((FloatObjectInspector) argumentOIs[0]).get(arguments[0].get());
         resultText.set(numberFormat.format(xFloatValue));
         break;
+      case DECIMAL:
+        xDecimalValue = ((HiveDecimalObjectInspector) argumentOIs[0])
+            .getPrimitiveJavaObject(arguments[0].get());
+        resultText.set(numberFormat.format(xDecimalValue.bigDecimalValue()));
+        break;
       case BYTE:
       case SHORT:
       case INT:
@@ -199,7 +210,8 @@ public class GenericUDFFormatNumber exte
           + " or \"" + serdeConstants.INT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.BIGINT_TYPE_NAME + "\""
           + " or \"" + serdeConstants.DOUBLE_TYPE_NAME + "\""
-          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\", but \""
+          + " or \"" + serdeConstants.FLOAT_TYPE_NAME + "\""
+          + " or \"" + serdeConstants.DECIMAL_TYPE_NAME + "\", but \""
           + argumentOIs[0].getTypeName() + "\" was found.");
     }
     return resultText;
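For the new DECIMAL branch the value is rendered through its BigDecimal view, so precision is never squeezed through a double. A standalone sketch of that conversion; the pattern string and literal are assumptions (the UDF builds its pattern from the requested number of decimal places):

    import java.math.BigDecimal;
    import java.text.DecimalFormat;

    DecimalFormat numberFormat = new DecimalFormat("#,###,###,###,###,###,##0.00");
    BigDecimal value = new BigDecimal("12345678.91011");  // stands in for xDecimalValue.bigDecimalValue()
    System.out.println(numberFormat.format(value));       // prints 12,345,678.91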

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFInFile.java Sat Jul 26 23:45:46 2014
@@ -33,8 +33,8 @@ import org.apache.hadoop.hive.ql.exec.UD
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 
 /**
  * IN_FILE(str, filename) returns true if 'str' appears in the file specified
@@ -59,29 +59,35 @@ public class GenericUDFInFile extends Ge
           "IN_FILE() accepts exactly 2 arguments.");
     }
 
-    for (int i = 0; i < arguments.length; i++) {
-      if (!String.class.equals(
-            PrimitiveObjectInspectorUtils.
-                getJavaPrimitiveClassFromObjectInspector(arguments[i]))) {
-        throw new UDFArgumentTypeException(i, "The "
-            + GenericUDFUtils.getOrdinal(i + 1)
-            + " argument of function IN_FILE must be a string but "
-            + arguments[i].toString() + " was given.");
-      }
-    }
-
     strObjectInspector = arguments[0];
     fileObjectInspector = arguments[1];
 
-    if (!ObjectInspectorUtils.isConstantObjectInspector(fileObjectInspector)) {
-      throw new UDFArgumentTypeException(1,
-          "The second argument of IN_FILE() must be a constant string but " +
-          fileObjectInspector.toString() + " was given.");
+    if (!isTypeCompatible(strObjectInspector)) {
+      throw new UDFArgumentTypeException(0, "The first " +
+        "argument of function IN_FILE must be a string, " +
+        "char or varchar but " +
+        strObjectInspector.toString() + " was given.");
+    }
+
+    if (((PrimitiveObjectInspector) fileObjectInspector).getPrimitiveCategory() !=
+          PrimitiveObjectInspector.PrimitiveCategory.STRING ||
+      !ObjectInspectorUtils.isConstantObjectInspector(fileObjectInspector)) {
+      throw new UDFArgumentTypeException(1, "The second " +
+        "argument of IN_FILE() must be a constant string but " +
+        fileObjectInspector.toString() + " was given.");
     }
 
     return PrimitiveObjectInspectorFactory.javaBooleanObjectInspector;
   }
 
+  private boolean isTypeCompatible(ObjectInspector argument) {
+    PrimitiveObjectInspector poi = ((PrimitiveObjectInspector) argument);
+    return
+      poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.STRING ||
+      poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.CHAR ||
+      poi.getPrimitiveCategory() == PrimitiveObjectInspector.PrimitiveCategory.VARCHAR;
+  }
+
   @Override
   public String[] getRequiredFiles() {
     return new String[] {
@@ -96,12 +102,12 @@ public class GenericUDFInFile extends Ge
       return null;
     }
 
-    String str = (String)ObjectInspectorUtils.copyToStandardJavaObject(
-        arguments[0].get(), strObjectInspector);
+    String str = ObjectInspectorUtils.copyToStandardJavaObject(
+        arguments[0].get(), strObjectInspector).toString();
 
     if (set == null) {
       String fileName = (String)ObjectInspectorUtils.copyToStandardJavaObject(
-          arguments[1].get(), fileObjectInspector);
+        arguments[1].get(), fileObjectInspector);
       try {
         load(new FileInputStream((new File(fileName)).getName()));
       } catch (FileNotFoundException e) {
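The evaluate() change pairs with the relaxed argument check: for CHAR and VARCHAR inputs, copyToStandardJavaObject() hands back HiveChar/HiveVarchar objects rather than String, so the old (String) cast would fail at runtime; toString() yields the text for all three accepted categories. A reduced illustration (the varchar value is assumed):

    // Illustrative only: why the cast was replaced with toString().
    Object copied = new org.apache.hadoop.hive.common.type.HiveVarchar("needle", 10);
    // String str = (String) copied;   // ClassCastException for varchar input
    String str = copied.toString();    // works for String, HiveChar and HiveVarchar alike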

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFToBinary.java Sat Jul 26 23:45:46 2014
@@ -25,13 +25,15 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.BinaryObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveCharObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveVarcharObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.BinaryConverter;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
 
 @Description(name = "binary", value = "_FUNC_(a) - cast a to binary",
-             extended = "Currently only string or binary can be cast into 
binary")
+             extended = "Currently only string, char, varchar or binary can be 
cast into binary")
 public class GenericUDFToBinary extends GenericUDF {
 
   private transient PrimitiveObjectInspector argumentOI;
@@ -52,9 +54,11 @@ public class GenericUDFToBinary extends 
     }
 
     if (!((argumentOI instanceof BinaryObjectInspector)
+        || (argumentOI instanceof HiveCharObjectInspector)
+        || (argumentOI instanceof HiveVarcharObjectInspector)
         || (argumentOI instanceof StringObjectInspector)
         || (argumentOI instanceof VoidObjectInspector))){
-      throw new UDFArgumentException("Only string or binary data can be cast 
into binary " +
+      throw new UDFArgumentException("Only string, char, varchar or binary 
data can be cast into binary " +
                "data types.");
     }
     baConverter = new BinaryConverter(argumentOI,

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java Sat Jul 26 23:45:46 2014
@@ -54,7 +54,7 @@ public class UDFCurrentDB extends Generi
 
   @Override
   public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    throw new IllegalStateException("never");
+    return SessionState.get().getCurrentDatabase();
   }
 
   @Override

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/tez/TestTezTask.java Sat Jul 26 23:45:46 2014
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.TezEdgeProperty;
 import org.apache.hadoop.hive.ql.plan.TezEdgeProperty.EdgeType;
 import org.apache.hadoop.hive.ql.plan.TezWork;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -156,6 +157,7 @@ public class TestTezTask {
     conf = new JobConf();
     appLr = mock(LocalResource.class);
 
+    SessionState.start(new HiveConf());
     session = mock(TezSession.class);
     sessionState = mock(TezSessionState.class);
     when(sessionState.getSession()).thenReturn(session);
@@ -166,6 +168,7 @@ public class TestTezTask {
 
   @After
   public void tearDown() throws Exception {
+    SessionState.get().close();
     utils = null;
     work = null;
     task = null;

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java Sat Jul 26 23:45:46 2014
@@ -45,28 +45,36 @@ public class TestConstantVectorExpressio
     ConstantVectorExpression bytesCve = new ConstantVectorExpression(2, str.getBytes());
     Decimal128 decVal = new Decimal128(25.8, (short) 1);
     ConstantVectorExpression decimalCve = new ConstantVectorExpression(3, decVal);
-
+    ConstantVectorExpression nullCve = new ConstantVectorExpression(4, "string", true);
+
     int size = 20;
-    VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(size, 4, 0);
+    VectorizedRowBatch vrg = VectorizedRowGroupGenUtil.getVectorizedRowBatch(size, 5, 0);
 
     LongColumnVector lcv = (LongColumnVector) vrg.cols[0];
     DoubleColumnVector dcv = new DoubleColumnVector(size);
     BytesColumnVector bcv = new BytesColumnVector(size);
     DecimalColumnVector dv = new DecimalColumnVector(5, 1);
+    BytesColumnVector bcvn = new BytesColumnVector(size);
     vrg.cols[1] = dcv;
     vrg.cols[2] = bcv;
     vrg.cols[3] = dv;
+    vrg.cols[4] = bcvn;
 
     longCve.evaluate(vrg);
     doubleCve.evaluate(vrg);
     bytesCve.evaluate(vrg);  
     decimalCve.evaluate(vrg);
+    nullCve.evaluate(vrg);
     assertTrue(lcv.isRepeating);
     assertTrue(dcv.isRepeating);
     assertTrue(bcv.isRepeating);
     assertEquals(17, lcv.vector[0]);
     assertTrue(17.34 == dcv.vector[0]);
     
+    assertTrue(bcvn.isRepeating);
+    assertTrue(bcvn.isNull[0]);
+    assertTrue(!bcvn.noNulls);
+    
     byte[] alphaBytes = "alpha".getBytes();
     assertTrue(bcv.length[0] == alphaBytes.length);
     assertTrue(sameFirstKBytes(alphaBytes, bcv.vector[0], alphaBytes.length)); 

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestVectorExpressionWriters.java Sat Jul 26 23:45:46 2014
@@ -25,8 +25,11 @@ import java.util.Random;
 
 import junit.framework.Assert;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.TimestampUtils;
@@ -36,6 +39,7 @@ import org.apache.hadoop.hive.ql.plan.Ex
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.io.TimestampWritable;
@@ -44,6 +48,7 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.apache.hadoop.io.BooleanWritable;
@@ -79,6 +84,10 @@ public class TestVectorExpressionWriters
     return null;
   }
 
+  private Writable getWritableValue(TypeInfo ti, Decimal128 value) {
+    return new HiveDecimalWritable(HiveDecimal.create(value.toBigDecimal()));
+  }
+
   private Writable getWritableValue(TypeInfo ti, byte[] value) {
     if (ti.equals(TypeInfoFactory.stringTypeInfo)) {
       return new Text(value);
@@ -135,7 +144,7 @@ public class TestVectorExpressionWriters
     
     VectorExpressionWriter vew = getWriter(type);
     for (int i = 0; i < vectorSize; i++) {
-      values[i] = vew.initValue(null);
+      values[i] = null;  // setValue() should be able to handle null input
       values[i] = vew.setValue(values[i], dcv, i);
       if (values[i] != null) {
         Writable expected = getWritableValue(type, dcv.vector[i]);
@@ -146,6 +155,41 @@ public class TestVectorExpressionWriters
     }
   }  
 
+  private void testWriterDecimal(DecimalTypeInfo type) throws HiveException {
+    DecimalColumnVector dcv = VectorizedRowGroupGenUtil.generateDecimalColumnVector(type, true, false,
+        this.vectorSize, new Random(10));
+    dcv.isNull[2] = true;
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      Writable w = (Writable) vew.writeValue(dcv, i);
+      if (w != null) {
+        Writable expected = getWritableValue(type, dcv.vector[i]);
+        Assert.assertEquals(expected, w);
+      } else {
+        Assert.assertTrue(dcv.isNull[i]);
+      }
+    }
+  }
+
+  private void testSetterDecimal(DecimalTypeInfo type) throws HiveException {
+    DecimalColumnVector dcv = VectorizedRowGroupGenUtil.generateDecimalColumnVector(type, true, false,
+        this.vectorSize, new Random(10));
+    dcv.isNull[2] = true;
+    Object[] values = new Object[this.vectorSize];
+
+    VectorExpressionWriter vew = getWriter(type);
+    for (int i = 0; i < vectorSize; i++) {
+      values[i] = null;  // setValue() should be able to handle null input
+      values[i] = vew.setValue(values[i], dcv, i);
+      if (values[i] != null) {
+        Writable expected = getWritableValue(type, dcv.vector[i]);
+        Assert.assertEquals(expected, values[i]);
+      } else {
+        Assert.assertTrue(dcv.isNull[i]);
+      }
+    }
+  }
+
   private void testWriterLong(TypeInfo type) throws HiveException {
     LongColumnVector lcv = VectorizedRowGroupGenUtil.generateLongColumnVector(true, false,
         vectorSize, new Random(10));
@@ -178,7 +222,7 @@ public class TestVectorExpressionWriters
     
     VectorExpressionWriter vew = getWriter(type);
     for (int i = 0; i < vectorSize; i++) {
-      values[i] = vew.initValue(null);
+      values[i] = null;  // setValue() should be able to handle null input
       values[i] = vew.setValue(values[i], lcv, i);
       if (values[i] != null) {
         Writable expected = getWritableValue(type, lcv.vector[i]);
@@ -290,7 +334,7 @@ public class TestVectorExpressionWriters
     Object[] values = new Object[this.vectorSize];
     VectorExpressionWriter vew = getWriter(type);
     for (int i = 0; i < vectorSize; i++) {
-      values[i] = vew.initValue(null);
+      values[i] = null;  // setValue() should be able to handle null input
       Writable w = (Writable) vew.setValue(values[i], bcv, i);
       if (w != null) {
         byte [] val = new byte[bcv.length[i]];
@@ -327,7 +371,19 @@ public class TestVectorExpressionWriters
   public void testVectorExpressionWriterLong() throws HiveException {
     testWriterLong(TypeInfoFactory.longTypeInfo);
   }
-  
+
+  @Test
+  public void testVectorExpressionWriterDecimal() throws HiveException {
+    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
+    testWriterDecimal(typeInfo);
+  }
+
+  @Test
+  public void testVectorExpressionSetterDecimal() throws HiveException {
+    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
+    testSetterDecimal(typeInfo);
+  }
+
   @Test
   public void testVectorExpressionSetterLong() throws HiveException {
     testSetterLong(TypeInfoFactory.longTypeInfo);

Modified: hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java (original)
+++ hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/util/VectorizedRowGroupGenUtil.java Sat Jul 26 23:45:46 2014
@@ -20,9 +20,12 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 
 public class VectorizedRowGroupGenUtil {
 
@@ -103,6 +106,42 @@ public class VectorizedRowGroupGenUtil {
     return dcv;
   }
 
+  public static DecimalColumnVector generateDecimalColumnVector(DecimalTypeInfo typeInfo, boolean nulls,
+      boolean repeating, int size, Random rand) {
+    DecimalColumnVector dcv =
+        new DecimalColumnVector(size, typeInfo.precision(), typeInfo.scale());
+
+    dcv.noNulls = !nulls;
+    dcv.isRepeating = repeating;
+
+    Decimal128 repeatingValue = new Decimal128();
+    do{
+      repeatingValue.update(rand.nextDouble(), (short)typeInfo.scale());
+    }while(repeatingValue.doubleValue() == 0);
+
+    int nullFrequency = generateNullFrequency(rand);
+
+    for(int i = 0; i < size; i++) {
+      if(nulls && (repeating || i % nullFrequency == 0)) {
+        dcv.isNull[i] = true;
+        dcv.vector[i] = null;//Decimal128.ONE;
+
+      }else {
+        dcv.isNull[i] = false;
+        if (repeating) {
+          dcv.vector[i].update(repeatingValue);
+        } else {
+          dcv.vector[i].update(rand.nextDouble(), (short) typeInfo.scale());
+        }
+
+        if(dcv.vector[i].doubleValue() == 0) {
+          i--;
+        }
+      }
+    }
+    return dcv;
+  }
+
   private static int generateNullFrequency(Random rand) {
     return 60 + rand.nextInt(20);
   }
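
A usage sketch for the new generator; note the null semantics: with nulls=true and repeating=true every row is null, while with repeating=false roughly one row in 60-80 is null (see generateNullFrequency above):

    DecimalTypeInfo typeInfo = TypeInfoFactory.getDecimalTypeInfo(38, 18);
    Random rand = new Random(123); // fixed seed only for repeatable test data
    DecimalColumnVector dcv = VectorizedRowGroupGenUtil.generateDecimalColumnVector(
        typeInfo, true /* nulls */, false /* repeating */, 1024, rand);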

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
 Sat Jul 26 23:45:46 2014
@@ -177,7 +177,7 @@ public class TestSymlinkTextInputFormat 
       QueryPlan plan = drv.getPlan();
       MapRedTask selectTask = (MapRedTask)plan.getRootTasks().get(0);
 
-      List<Path> inputPaths = Utilities.getInputPaths(newJob, selectTask.getWork().getMapWork(), emptyScratchDir, ctx);
+      List<Path> inputPaths = Utilities.getInputPaths(newJob, selectTask.getWork().getMapWork(), emptyScratchDir, ctx, false);
       Utilities.setInputPaths(newJob, inputPaths);
 
      Utilities.setMapRedWork(newJob, selectTask.getWork(), ctx.getMRTmpPath());

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/lockmgr/TestDbTxnManager.java
 Sat Jul 26 23:45:46 2014
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.ql.hooks.R
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.junit.After;
@@ -51,6 +52,7 @@ public class TestDbTxnManager {
 
   public TestDbTxnManager() throws Exception {
     TxnDbUtil.setConfValues(conf);
+    SessionState.start(conf);
     ctx = new Context(conf);
     LogManager.getRootLogger().setLevel(Level.DEBUG);
     tearDown();
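
This hunk and the two that follow apply the same fix: SessionState.start(conf) now runs before new Context(conf), since Context construction can depend on an active session (the reordering itself, not anything shown here, is the evidence for that). The resulting setup shape, as a sketch:

    // Ordering these three fixes establish in test setup.
    private void setup(HiveConf conf) throws Exception {
      SessionState.start(conf);        // establish the session first
      Context ctx = new Context(conf); // safe once a session exists
    }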

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
 Sat Jul 26 23:45:46 2014
@@ -42,10 +42,10 @@ public class TestMacroSemanticAnalyzer {
   @Before
   public void setup() throws Exception {
     conf = new HiveConf();
+    SessionState.start(conf);
     context = new Context(conf);
     parseDriver = new ParseDriver();
     analyzer = new MacroSemanticAnalyzer(conf);
-    SessionState.start(conf);
   }
 
   private ASTNode parse(String command) throws Exception {

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestHiveAuthorizationTaskFactory.java
 Sat Jul 26 23:45:46 2014
@@ -69,10 +69,10 @@ public class TestHiveAuthorizationTaskFa
     db = Mockito.mock(Hive.class);
     table = new Table(DB, TABLE);
     partition = new Partition(table);
+    SessionState.start(conf);
     context = new Context(conf);
     parseDriver = new ParseDriver();
     analyzer = new DDLSemanticAnalyzer(conf, db);
-    SessionState.start(conf);
     Mockito.when(db.getTable(DB, TABLE, false)).thenReturn(table);
     Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
       .thenReturn(partition);

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveOperationType.java
 Sat Jul 26 23:45:46 2014
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.fail;
 
 import org.apache.hadoop.hive.ql.plan.HiveOperation;
 import org.junit.Test;
@@ -40,8 +40,6 @@ public class TestHiveOperationType {
         fail("Unable to find corresponding type in HiveOperationType for " + 
op + " : " +  ex );
       }
     }
-    assertEquals("Check if HiveOperation, HiveOperationType have same number 
of instances",
-        HiveOperation.values().length, HiveOperationType.values().length);
   }
 
 }
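
With the size-equality assertion gone, the test now only verifies the forward mapping. The retained loop presumably looks like this (inferred from the fail() message above; the full body is not shown in this hunk):

    for (HiveOperation op : HiveOperation.values()) {
      try {
        // every HiveOperation name must resolve to a HiveOperationType
        HiveOperationType.valueOf(op.name());
      } catch (IllegalArgumentException ex) {
        fail("Unable to find corresponding type in HiveOperationType for " + op + " : " + ex);
      }
    }

Dropping the length check means HiveOperationType may now legitimately contain entries with no HiveOperation counterpart.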

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessController.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessController.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessController.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/TestSQLStdHiveAccessController.java
 Sat Jul 26 23:45:46 2014
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
-import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -42,17 +41,13 @@ public class TestSQLStdHiveAccessControl
    * @throws HiveAuthzPluginException
    */
   @Test
-  public void checkConfigProcessing() throws HiveAuthzPluginException {
+  public void testConfigProcessing() throws HiveAuthzPluginException {
     HiveConf processedConf = new HiveConf();
 
    SQLStdHiveAccessController accessController = new SQLStdHiveAccessController(null,
         processedConf, new HadoopDefaultAuthenticator());
     accessController.applyAuthorizationConfigPolicy(processedConf);
 
-    // check that unsafe commands have been disabled
-    assertEquals("only set command should be allowed",
-        processedConf.getVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST), "set");
-
     // check that hook to disable transforms has been added
     assertTrue("Check for transform query disabling hook",
         
processedConf.getVar(ConfVars.PREEXECHOOKS).contains(DisallowTransformHook.class.getName()));
@@ -90,7 +85,7 @@ public class TestSQLStdHiveAccessControl
    * @throws HiveAuthzPluginException
    */
   @Test
-  public void checkConfigProcessingCustomSetWhitelist() throws HiveAuthzPluginException {
+  public void testConfigProcessingCustomSetWhitelist() throws HiveAuthzPluginException {
 
     HiveConf processedConf = new HiveConf();
     // add custom value, including one from the default, one new one
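
The deleted assertEquals above had pinned the post-policy command whitelist to exactly "set"; its removal suggests the default whitelist is now broader than that single command. A hedged sketch of how a relaxed check could read the processed config (HIVE_SECURITY_COMMAND_WHITELIST is the ConfVar named in the deleted lines; the contains() check is illustrative, not from this commit):

    String whitelist = processedConf.getVar(ConfVars.HIVE_SECURITY_COMMAND_WHITELIST);
    assertTrue("set should remain allowed", whitelist.contains("set"));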

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestCleaner.java
 Sat Jul 26 23:45:46 2014
@@ -23,7 +23,6 @@ import org.apache.hadoop.hive.conf.HiveC
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -68,7 +67,7 @@ public class TestCleaner extends Compact
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
 
     // Check that the files are removed
     List<Path> paths = getDirectories(conf, t, null);
@@ -101,7 +100,7 @@ public class TestCleaner extends Compact
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
 
     // Check that the files are removed
     List<Path> paths = getDirectories(conf, t, p);
@@ -132,7 +131,7 @@ public class TestCleaner extends Compact
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
 
     // Check that the files are removed
     List<Path> paths = getDirectories(conf, t, null);
@@ -172,7 +171,7 @@ public class TestCleaner extends Compact
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
 
     // Check that the files are removed
     List<Path> paths = getDirectories(conf, t, p);
@@ -289,7 +288,7 @@ public class TestCleaner extends Compact
 
     // Check there are no compactions requests left.
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
 
     // Check that the files are removed
     List<Path> paths = getDirectories(conf, t, p);
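
The assertNull-to-assertEquals switch throughout this file (and in TestInitiator below) guards against the Thrift accessor returning an empty list instead of null. Thrift-generated size accessors are null-safe, roughly:

    // Typical shape of a Thrift-generated size accessor (sketch).
    public int getCompactsSize() {
      return (this.compacts == null) ? 0 : this.compacts.size();
    }

so asserting getCompactsSize() == 0 passes whether the field is unset or set to an empty list.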

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/txn/compactor/TestInitiator.java
 Sat Jul 26 23:45:46 2014
@@ -23,7 +23,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.*;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
-import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -187,7 +186,7 @@ public class TestInitiator extends Compa
     startInitiator(conf);
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
   }
 
   @Test
@@ -246,7 +245,7 @@ public class TestInitiator extends Compa
     startInitiator(conf);
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
   }
 
   @Test
@@ -379,7 +378,7 @@ public class TestInitiator extends Compa
     startInitiator(conf);
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
   }
 
   @Test
@@ -492,7 +491,7 @@ public class TestInitiator extends Compa
     startInitiator(conf);
 
     ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
-    Assert.assertNull(rsp.getCompacts());
+    Assert.assertEquals(0, rsp.getCompactsSize());
   }
 
   @Test

Modified: 
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_addjar.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_addjar.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_addjar.q 
(original)
+++ 
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_addjar.q 
Sat Jul 26 23:45:46 2014
@@ -1,7 +1,4 @@
 set hive.security.authorization.enabled=true;
 set 
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 
--- running a sql query to initialize the authorization - not needed in real HS2 mode
-show tables;
-
-add jar ${system:maven.local.repository}/org/apache/hive/hcatalog/hive-hcatalog-core/${system:hive.version}/hive-hcatalog-core-${system:hive.version}.jar;
+add jar dummy.jar;

Modified: 
hive/branches/spark/ql/src/test/queries/clientnegative/authorization_dfs.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientnegative/authorization_dfs.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientnegative/authorization_dfs.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientnegative/authorization_dfs.q 
Sat Jul 26 23:45:46 2014
@@ -1,7 +1,4 @@
 set hive.security.authorization.enabled=true;
 set 
hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
 
--- running a sql query to initialize the authorization - not needed in real HS2 mode
-show tables;
-dfs -ls ${system:test.tmp.dir}/
-
+dfs -ls dummy_file;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_filter.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_filter.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_filter.q 
(original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/annotate_stats_filter.q 
Sat Jul 26 23:45:46 2014
@@ -50,6 +50,20 @@ explain extended select * from loc_orc w
 -- numRows: 0 rawDataSize: 0
 explain extended select * from loc_orc where !true;
 
+-- Constant evaluation. true will pass all rows, false will not pass any rows
+-- numRows: 8 rawDataSize: 804
+explain extended select * from loc_orc where true;
+-- numRows: 8 rawDataSize: 804
+explain extended select * from loc_orc where 'foo';
+-- numRows: 8 rawDataSize: 804
+explain extended select * from loc_orc where true = true;
+-- numRows: 0 rawDataSize: 0
+explain extended select * from loc_orc where false = true;
+-- numRows: 0 rawDataSize: 0
+explain extended select * from loc_orc where 'foo' = 'bar';
+-- numRows: 0 rawDataSize: 0
+explain extended select * from loc_orc where false;
+
 -- OR evaluation. 1 row for OH and 1 row for CA
 -- numRows: 2 rawDataSize: 204
 explain extended select * from loc_orc where state='OH' or state='CA';

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/authorization_9.q 
Sat Jul 26 23:45:46 2014
@@ -15,3 +15,11 @@ grant select on table dummy to user hive
 grant select (key, value) on table dummy to user hive_test_user2;
 
 show grant on all;
+
+revoke select on database default from user hive_test_user;
+revoke select on table dummy from user hive_test_user;
+revoke select (key, value) on table dummy from user hive_test_user;
+
+revoke select on database default from user hive_test_user2;
+revoke select on table dummy from user hive_test_user2;
+revoke select (key, value) on table dummy from user hive_test_user2;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q
 (original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_revoke_table_priv.q
 Sat Jul 26 23:45:46 2014
@@ -74,6 +74,13 @@ set user.name=user2;
 SHOW GRANT USER user2 ON TABLE table_priv_rev;
 set user.name=user1;
 
+-- revoke grant option for select privilege from user2
+REVOKE GRANT OPTION FOR SELECT ON TABLE table_priv_rev FROM USER user2;
+
+set user.name=user2;
+SHOW GRANT USER user2 ON TABLE table_priv_rev;
+set user.name=user1;
+
 -- revoke select privilege from user2
 REVOKE SELECT ON TABLE table_priv_rev FROM USER user2;
 

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q
 (original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_role_grant1.q
 Sat Jul 26 23:45:46 2014
@@ -26,6 +26,10 @@ create role src_role_wadmin;
 grant src_role_wadmin to user user2 with admin option;
 show role grant user user2;
 
+-- revoke admin option
+revoke admin option for src_role_wadmin from user user2;
+show role grant user user2;
+
 -- revoke role without role keyword
 revoke src_role_wadmin from user user2;
 show role grant user user2;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q
 (original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/authorization_view_sqlstd.q
 Sat Jul 26 23:45:46 2014
@@ -30,6 +30,8 @@ show grant user user3 on table vt1;
 
 
 set user.name=user2;
+
+explain authorization select * from vt1;
 select * from vt1;
 
 set user.name=user1;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/avro_partitioned.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/avro_partitioned.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/avro_partitioned.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/avro_partitioned.q 
Sat Jul 26 23:45:46 2014
@@ -72,7 +72,9 @@ SELECT * FROM episodes_partitioned ORDER
 SELECT * FROM episodes_partitioned WHERE doctor_pt = 6;
 -- Fetch w/non-existent partition
 SELECT * FROM episodes_partitioned WHERE doctor_pt = 7 LIMIT 5;
-
+-- Alter table add an empty partition
+ALTER TABLE episodes_partitioned ADD PARTITION (doctor_pt=7);
+SELECT COUNT(*) FROM episodes_partitioned;
 
 -- Verify that reading from an Avro partition works
 -- even if it has an old schema relative to the current table level schema

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/columnstats_partlvl.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/columnstats_partlvl.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/columnstats_partlvl.q 
(original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/columnstats_partlvl.q 
Sat Jul 26 23:45:46 2014
@@ -25,3 +25,9 @@ analyze table Employee_Part partition (e
 
 describe formatted Employee_Part.employeeID   partition (employeeSalary=2000.0);
 describe formatted Employee_Part.employeeName partition (employeeSalary=2000.0);
+
+explain 
+analyze table Employee_Part  compute statistics for columns;
+analyze table Employee_Part  compute statistics for columns;
+
+describe formatted Employee_Part.employeeID;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/nonmr_fetch_threshold.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/nonmr_fetch_threshold.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/nonmr_fetch_threshold.q 
(original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/nonmr_fetch_threshold.q 
Sat Jul 26 23:45:46 2014
@@ -5,5 +5,6 @@ explain select cast(key as int) * 10, up
 
 set hive.fetch.task.conversion.threshold=100;
 
+-- from HIVE-7397, limit + partition pruning filter
 explain select * from srcpart where ds='2008-04-08' AND hr='11' limit 10;
 explain select cast(key as int) * 10, upper(value) from src limit 10;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_date.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_date.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_date.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/orc_ppd_date.q Sat 
Jul 26 23:45:46 2014
@@ -12,6 +12,9 @@ select sum(hash(*)) from newtypesorc whe
 set hive.optimize.index.filter=true;
 select sum(hash(*)) from newtypesorc where da='1970-02-20';
 
+set hive.optimize.index.filter=true;
+select sum(hash(*)) from newtypesorc where da= date '1970-02-20';
+
 set hive.optimize.index.filter=false;
 select sum(hash(*)) from newtypesorc where da=cast('1970-02-20' as date);
 

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/smb_mapjoin_18.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/smb_mapjoin_18.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/smb_mapjoin_18.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/smb_mapjoin_18.q Sat 
Jul 26 23:45:46 2014
@@ -36,8 +36,10 @@ select count(*) from test_table2 where d
 select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '1';
 select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds = '1';
 
+set hive.optimize.constant.propagation=false;
 -- Insert data into the bucketed table by selecting from another bucketed table
 -- This should be a map-only operation, one of the buckets should be empty
+
 EXPLAIN
 INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2')
 SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238;
@@ -45,6 +47,7 @@ SELECT a.key, a.value FROM test_table1 a
 INSERT OVERWRITE TABLE test_table2 PARTITION (ds = '2')
 SELECT a.key, a.value FROM test_table1 a WHERE a.ds = '1' and a.key = 238;
 
+set hive.optimize.constant.propagation=true;
 select count(*) from test_table2 where ds = '2';
 select count(*) from test_table2 where ds = '2' and hash(key) % 2 = 0;
 select count(*) from test_table2 where ds = '2' and hash(key) % 2 = 1;

Modified: hive/branches/spark/ql/src/test/queries/clientpositive/stats19.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/stats19.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/stats19.q (original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/stats19.q Sat Jul 26 
23:45:46 2014
@@ -17,7 +17,7 @@ set hive.stats.key.prefix.max.length=0;
 -- The stats key should be hashed since the max length is too small
 insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
 
-set hive.stats.key.prefix.max.length=200;
+set hive.stats.key.prefix.max.length=250;
 
 -- The stats key should not be hashed since the max length is large enough
 insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
@@ -41,7 +41,7 @@ insert overwrite table stats_part partit
 
 desc formatted stats_part partition (ds='2010-04-08', hr = '13');
 
-set hive.stats.key.prefix.max.length=200;
+set hive.stats.key.prefix.max.length=250;
 
 -- The stats key should not be hashed since the max length is large enough
 insert overwrite table stats_part partition (ds='2010-04-08', hr = '13') select key, value from src;
@@ -66,7 +66,7 @@ set hive.stats.key.prefix.max.length=0;
 -- The stats key should be hashed since the max length is too small
 insert overwrite table stats_part partition (ds='2010-04-08', hr) select key, value, '13' from src;
 
-set hive.stats.key.prefix.max.length=200;
+set hive.stats.key.prefix.max.length=250;
 
 -- The stats key should not be hashed since the max length is large enough
 insert overwrite table stats_part partition (ds='2010-04-08', hr) select key, value, '13' from src;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/subquery_exists_having.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/subquery_exists_having.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/queries/clientpositive/subquery_exists_having.q 
(original)
+++ 
hive/branches/spark/ql/src/test/queries/clientpositive/subquery_exists_having.q 
Sat Jul 26 23:45:46 2014
@@ -1,4 +1,4 @@
-
+set hive.optimize.correlation=false;
 
 -- no agg, corr
 explain
@@ -22,6 +22,30 @@ having exists 
   )
 ;
 
+set hive.optimize.correlation=true;
+
+-- no agg, corr
+explain
+select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+;
+
+select b.key, count(*)
+from src b
+group by b.key
+having exists
+  (select a.key
+  from src a
+  where a.key = b.key and a.value > 'val_9'
+  )
+;
+
 -- view test
 create view cv1 as 
 select b.key, count(*) as c

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_having.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_having.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_having.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/subquery_in_having.q 
Sat Jul 26 23:45:46 2014
@@ -40,6 +40,8 @@ group by key, value
 having count(*) in (select count(*) from src s1 where s1.key > '9'  and 
s1.value = b.value group by s1.key )
 ;
 
+set hive.optimize.correlation=false;
+
 -- agg, non corr
 explain
 select p_mfgr, avg(p_size)
@@ -53,6 +55,21 @@ having b.p_mfgr in 
    )
 ;
 
+set hive.optimize.correlation=true;
+
+-- agg, non corr
+explain
+select p_mfgr, avg(p_size)
+from part b
+group by b.p_mfgr
+having b.p_mfgr in
+   (select p_mfgr
+    from part
+    group by p_mfgr
+    having max(p_size) - min(p_size) < 20
+   )
+;
+
 -- join on agg
 select b.key, min(b.value)
 from src b

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/subquery_views.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/subquery_views.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/subquery_views.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/subquery_views.q Sat 
Jul 26 23:45:46 2014
@@ -26,6 +26,11 @@ where b.key not in
   )
 ;
 
+explain
+select * 
+from cv2 where cv2.key in (select key from cv2 c where c.key < '11');
+;
+
 select * 
 from cv2 where cv2.key in (select key from cv2 c where c.key < '11');
 ;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/truncate_table.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/truncate_table.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/truncate_table.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/truncate_table.q Sat 
Jul 26 23:45:46 2014
@@ -12,24 +12,31 @@ load data local inpath '../../data/files
 load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='11');
 load data local inpath '../../data/files/kv1.txt' into table srcpart_truncate partition (ds='2008-04-09', hr='12');
 
+analyze table src_truncate     compute statistics;
+analyze table srcpart_truncate partition(ds,hr) compute statistics;
 set hive.fetch.task.conversion=more;
+set hive.compute.query.using.stats=true;
 
 -- truncate non-partitioned table
 explain TRUNCATE TABLE src_truncate;
 TRUNCATE TABLE src_truncate;
 select * from src_truncate;
+select count (*) from src_truncate;
 
 -- truncate a partition
 explain TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
 TRUNCATE TABLE srcpart_truncate partition (ds='2008-04-08', hr='11');
 select * from srcpart_truncate where ds='2008-04-08' and hr='11';
+select count(*) from srcpart_truncate where ds='2008-04-08' and hr='11';
 
 -- truncate partitions with partial spec
 explain TRUNCATE TABLE srcpart_truncate partition (ds, hr='12');
 TRUNCATE TABLE srcpart_truncate partition (ds, hr='12');
 select * from srcpart_truncate where hr='12';
+select count(*) from srcpart_truncate where hr='12';
 
 -- truncate partitioned table
 explain TRUNCATE TABLE srcpart_truncate;
 TRUNCATE TABLE srcpart_truncate;
 select * from srcpart_truncate;
+select count(*) from srcpart_truncate;

Modified: 
hive/branches/spark/ql/src/test/queries/clientpositive/udf_format_number.q
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/queries/clientpositive/udf_format_number.q?rev=1613740&r1=1613739&r2=1613740&view=diff
==============================================================================
--- hive/branches/spark/ql/src/test/queries/clientpositive/udf_format_number.q 
(original)
+++ hive/branches/spark/ql/src/test/queries/clientpositive/udf_format_number.q 
Sat Jul 26 23:45:46 2014
@@ -63,3 +63,11 @@ SELECT format_number(CAST(12332.123456 A
     format_number(CAST(12332.1 AS FLOAT), 4),
     format_number(CAST(-12332.2 AS FLOAT), 0)
 FROM src tablesample (1 rows);
+
+-- decimals
+SELECT format_number(12332.123456BD, 4),
+    format_number(12332.123456BD, 2),
+    format_number(12332.1BD, 4),
+    format_number(-12332.2BD, 0),
+    format_number(CAST(12332.567 AS DECIMAL(8, 1)), 4)
+FROM src tablesample (1 rows);

