http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index d79b6ed..51df754 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -68,6 +68,8 @@ import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.thrift.TException;
 
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
+
 public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements 
IMetaStoreClient {
 
   SessionHiveMetaStoreClient(Configuration conf, Boolean allowEmbedded) throws 
MetaException {
@@ -88,6 +90,10 @@ public class SessionHiveMetaStoreClient extends 
HiveMetaStoreClient implements I
     return wh;
   }
 
+  // TODO CAT - a number of these need to be updated.  Don't bother with 
deprecated methods as
+  // this is just an internal class.  Wait until we're ready to move all the 
catalog stuff up
+  // into ql.
+
   @Override
   protected void create_table_with_environment_context(
       org.apache.hadoop.hive.metastore.api.Table tbl, EnvironmentContext 
envContext)
@@ -103,10 +109,13 @@ public class SessionHiveMetaStoreClient extends 
HiveMetaStoreClient implements I
   }
 
   @Override
-  protected void drop_table_with_environment_context(String dbname, String 
name,
+  protected void drop_table_with_environment_context(String catName, String 
dbname, String name,
       boolean deleteData, EnvironmentContext envContext) throws MetaException, 
TException,
       NoSuchObjectException, UnsupportedOperationException {
     // First try temp table
+    // TODO CAT - I think the right thing here is to always put temp tables in 
the current
+    // catalog.  But we don't yet have a notion of current catalog, so we'll 
have to hold on
+    // until we do.
     org.apache.hadoop.hive.metastore.api.Table table = getTempTable(dbname, 
name);
     if (table != null) {
       try {
@@ -120,7 +129,7 @@ public class SessionHiveMetaStoreClient extends 
HiveMetaStoreClient implements I
     }
 
     // Try underlying client
-    super.drop_table_with_environment_context(dbname,  name, deleteData, 
envContext);
+    super.drop_table_with_environment_context(catName, dbname,  name, 
deleteData, envContext);
   }
 
   @Override
@@ -143,9 +152,20 @@ public class SessionHiveMetaStoreClient extends 
HiveMetaStoreClient implements I
     if (table != null) {
       return deepCopy(table);  // Original method used deepCopy(), do the same 
here.
     }
-
     // Try underlying client
-    return super.getTable(dbname, name);
+    return super.getTable(DEFAULT_CATALOG_NAME, dbname, name);
+  }
+
+  // Need to override this one too or dropTable breaks because it doesn't find 
the table when it checks
+  // before the drop.
+  @Override
+  public org.apache.hadoop.hive.metastore.api.Table getTable(String catName, 
String dbName,
+                                                             String tableName) 
throws TException {
+    if (!DEFAULT_CATALOG_NAME.equals(catName)) {
+      return super.getTable(catName, dbName, tableName);
+    } else {
+      return getTable(dbName, tableName);
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 5eefc15..7470c8a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -108,6 +108,9 @@ import org.slf4j.LoggerFactory;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.ImmutableList;
 
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
+import static 
org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
+
 /**
  * BaseSemanticAnalyzer.
  *
@@ -646,16 +649,16 @@ public abstract class BaseSemanticAnalyzer {
   }
 
   protected List<FieldSchema> getColumns(ASTNode ast) throws SemanticException 
{
-    return getColumns(ast, true);
+    return getColumns(ast, true, conf);
   }
 
   /**
    * Get the list of FieldSchema out of the ASTNode.
    */
-  public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase) 
throws SemanticException {
-    return getColumns(ast, lowerCase, null,new ArrayList<SQLPrimaryKey>(), new 
ArrayList<SQLForeignKey>(),
-            new ArrayList<SQLUniqueConstraint>(), new 
ArrayList<SQLNotNullConstraint>(),
-        new ArrayList<SQLDefaultConstraint>(), new 
ArrayList<SQLCheckConstraint>());
+  public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase, 
Configuration conf)
+      throws SemanticException {
+    return getColumns(ast, lowerCase, null, new ArrayList<>(), new 
ArrayList<>(), new ArrayList<>(),
+            new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), conf);
   }
 
   private static class ConstraintInfo {
@@ -717,79 +720,83 @@ public abstract class BaseSemanticAnalyzer {
   /**
    * Process the unique constraints from the ast node and populate the 
SQLUniqueConstraint list.
    */
-  protected static void processUniqueConstraints(String databaseName, String 
tableName,
+  protected static void processUniqueConstraints(String catName, String 
databaseName, String tableName,
       ASTNode child, List<SQLUniqueConstraint> uniqueConstraints) throws 
SemanticException {
     List<ConstraintInfo> uniqueInfos = new ArrayList<ConstraintInfo>();
     generateConstraintInfos(child, uniqueInfos);
-    constraintInfosToUniqueConstraints(databaseName, tableName, uniqueInfos, 
uniqueConstraints);
+    constraintInfosToUniqueConstraints(catName, databaseName, tableName, 
uniqueInfos, uniqueConstraints);
   }
 
-  protected static void processUniqueConstraints(String databaseName, String 
tableName,
+  protected static void processUniqueConstraints(String catName, String 
databaseName, String tableName,
       ASTNode child, List<String> columnNames, List<SQLUniqueConstraint> 
uniqueConstraints)
           throws SemanticException {
     List<ConstraintInfo> uniqueInfos = new ArrayList<ConstraintInfo>();
     generateConstraintInfos(child, columnNames, uniqueInfos, null, null);
-    constraintInfosToUniqueConstraints(databaseName, tableName, uniqueInfos, 
uniqueConstraints);
+    constraintInfosToUniqueConstraints(catName, databaseName, tableName, 
uniqueInfos, uniqueConstraints);
   }
 
-  private static void constraintInfosToUniqueConstraints(String databaseName, 
String tableName,
+  private static void constraintInfosToUniqueConstraints(String catName, 
String databaseName, String tableName,
           List<ConstraintInfo> uniqueInfos, List<SQLUniqueConstraint> 
uniqueConstraints) {
     int i = 1;
     for (ConstraintInfo uniqueInfo : uniqueInfos) {
-      uniqueConstraints.add(new SQLUniqueConstraint(databaseName, tableName, 
uniqueInfo.colName,
+      uniqueConstraints.add(new SQLUniqueConstraint(catName, databaseName, 
tableName, uniqueInfo.colName,
               i++, uniqueInfo.constraintName, uniqueInfo.enable, 
uniqueInfo.validate, uniqueInfo.rely));
     }
   }
 
-  protected static void processCheckConstraints(String databaseName, String 
tableName,
+  protected static void processCheckConstraints(String catName, String 
databaseName, String tableName,
                                                   ASTNode child, List<String> 
columnNames,
                                                 List<SQLCheckConstraint> 
checkConstraints, final ASTNode typeChild,
                                                 final TokenRewriteStream 
tokenRewriteStream)
       throws SemanticException {
     List<ConstraintInfo> checkInfos = new ArrayList<ConstraintInfo>();
     generateConstraintInfos(child, columnNames, checkInfos, typeChild, 
tokenRewriteStream);
-    constraintInfosToCheckConstraints(databaseName, tableName, checkInfos, 
checkConstraints);
+    constraintInfosToCheckConstraints(catName, databaseName, tableName, 
checkInfos, checkConstraints);
   }
 
-  private static void constraintInfosToCheckConstraints(String databaseName, 
String tableName,
+  private static void constraintInfosToCheckConstraints(String catName, String 
databaseName, String tableName,
                                                           List<ConstraintInfo> 
checkInfos,
                                                         
List<SQLCheckConstraint> checkConstraints) {
     for (ConstraintInfo checkInfo : checkInfos) {
-      checkConstraints.add(new SQLCheckConstraint(databaseName, tableName, 
checkInfo.colName,
+      checkConstraints.add(new SQLCheckConstraint(catName, databaseName, 
tableName, checkInfo.colName,
                                                       checkInfo.defaultValue, 
checkInfo.constraintName, checkInfo.enable,
                                                       checkInfo.validate, 
checkInfo.rely));
     }
   }
-  protected static void processDefaultConstraints(String databaseName, String 
tableName,
+
+  protected static void processDefaultConstraints(String catName, String 
databaseName, String tableName,
       ASTNode child, List<String> columnNames, List<SQLDefaultConstraint> 
defaultConstraints, final ASTNode typeChild)
       throws SemanticException {
     List<ConstraintInfo> defaultInfos = new ArrayList<ConstraintInfo>();
     generateConstraintInfos(child, columnNames, defaultInfos, typeChild, null);
-    constraintInfosToDefaultConstraints(databaseName, tableName, defaultInfos, 
defaultConstraints);
+    constraintInfosToDefaultConstraints(catName, databaseName, tableName, 
defaultInfos, defaultConstraints);
   }
 
-  private static void constraintInfosToDefaultConstraints(String databaseName, 
String tableName,
+  private static void constraintInfosToDefaultConstraints(
+      String catName, String databaseName, String tableName,
      List<ConstraintInfo> defaultInfos, List<SQLDefaultConstraint> 
defaultConstraints) {
     for (ConstraintInfo defaultInfo : defaultInfos) {
-      defaultConstraints.add(new SQLDefaultConstraint(databaseName, tableName, 
defaultInfo.colName,
-          defaultInfo.defaultValue, defaultInfo.constraintName, 
defaultInfo.enable,
-          defaultInfo.validate, defaultInfo.rely));
+      defaultConstraints.add(new SQLDefaultConstraint(catName, databaseName, 
tableName,
+          defaultInfo.colName, defaultInfo.defaultValue, 
defaultInfo.constraintName,
+          defaultInfo.enable, defaultInfo.validate, defaultInfo.rely));
     }
   }
 
-  protected static void processNotNullConstraints(String databaseName, String 
tableName,
+  protected static void processNotNullConstraints(String catName, String 
databaseName, String tableName,
       ASTNode child, List<String> columnNames, List<SQLNotNullConstraint> 
notNullConstraints)
           throws SemanticException {
     List<ConstraintInfo> notNullInfos = new ArrayList<ConstraintInfo>();
     generateConstraintInfos(child, columnNames, notNullInfos, null, null);
-    constraintInfosToNotNullConstraints(databaseName, tableName, notNullInfos, 
notNullConstraints);
+    constraintInfosToNotNullConstraints(catName, databaseName, tableName, 
notNullInfos, notNullConstraints);
   }
 
-  private static void constraintInfosToNotNullConstraints(String databaseName, 
String tableName,
-          List<ConstraintInfo> notNullInfos, List<SQLNotNullConstraint> 
notNullConstraints) {
+  private static void constraintInfosToNotNullConstraints(
+      String catName, String databaseName, String tableName, 
List<ConstraintInfo> notNullInfos,
+      List<SQLNotNullConstraint> notNullConstraints) {
     for (ConstraintInfo notNullInfo : notNullInfos) {
-      notNullConstraints.add(new SQLNotNullConstraint(databaseName, tableName, 
notNullInfo.colName,
-              notNullInfo.constraintName, notNullInfo.enable, 
notNullInfo.validate, notNullInfo.rely));
+      notNullConstraints.add(new SQLNotNullConstraint(catName, databaseName, 
tableName,
+          notNullInfo.colName, notNullInfo.constraintName, notNullInfo.enable, 
notNullInfo.validate,
+          notNullInfo.rely));
     }
   }
 
@@ -1176,13 +1183,12 @@ public abstract class BaseSemanticAnalyzer {
    * Get the list of FieldSchema out of the ASTNode.
    * Additionally, populate the primaryKeys and foreignKeys if any.
    */
-  public static List<FieldSchema> getColumns(ASTNode ast, boolean lowerCase,
-    TokenRewriteStream tokenRewriteStream,
-    List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
-    List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> 
notNullConstraints,
-                                           List<SQLDefaultConstraint> 
defaultConstraints,
-                                             List<SQLCheckConstraint> 
checkConstraints)
-        throws SemanticException {
+  public static List<FieldSchema> getColumns(
+      ASTNode ast, boolean lowerCase, TokenRewriteStream tokenRewriteStream,
+      List<SQLPrimaryKey> primaryKeys, List<SQLForeignKey> foreignKeys,
+      List<SQLUniqueConstraint> uniqueConstraints, List<SQLNotNullConstraint> 
notNullConstraints,
+      List<SQLDefaultConstraint> defaultConstraints, List<SQLCheckConstraint> 
checkConstraints,
+      Configuration conf) throws SemanticException {
     List<FieldSchema> colList = new ArrayList<FieldSchema>();
     Tree parent = ast.getParent();
 
@@ -1192,7 +1198,11 @@ public abstract class BaseSemanticAnalyzer {
       switch (child.getToken().getType()) {
         case HiveParser.TOK_UNIQUE: {
             String[] qualifiedTabName = getQualifiedTableName((ASTNode) 
parent.getChild(0));
-            processUniqueConstraints(qualifiedTabName[0], qualifiedTabName[1], 
child, uniqueConstraints);
+            // TODO CAT - for now always use the default catalog.  Eventually 
we will want to see if
+            // the user specified a catalog
+            String catName = MetaStoreUtils.getDefaultCatalog(conf);
+            processUniqueConstraints(catName, qualifiedTabName[0], 
qualifiedTabName[1], child,
+                uniqueConstraints);
           }
           break;
         case HiveParser.TOK_PRIMARY_KEY: {
@@ -1237,23 +1247,26 @@ public abstract class BaseSemanticAnalyzer {
             }
             if (constraintChild != null) {
               String[] qualifiedTabName = getQualifiedTableName((ASTNode) 
parent.getChild(0));
+              // TODO CAT - for now always use the default catalog.  
Eventually we will want to see if
+              // the user specified a catalog
+              String catName = MetaStoreUtils.getDefaultCatalog(conf);
               // Process column constraint
               switch (constraintChild.getToken().getType()) {
               case HiveParser.TOK_CHECK_CONSTRAINT:
-                processCheckConstraints(qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
+                processCheckConstraints(catName, qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
                                           ImmutableList.of(col.getName()), 
checkConstraints, typeChild,
                                         tokenRewriteStream);
                 break;
               case HiveParser.TOK_DEFAULT_VALUE:
-                processDefaultConstraints(qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
+                processDefaultConstraints(catName, qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
                     ImmutableList.of(col.getName()), defaultConstraints, 
typeChild);
                 break;
                 case HiveParser.TOK_NOT_NULL:
-                  processNotNullConstraints(qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
+                  processNotNullConstraints(catName, qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
                           ImmutableList.of(col.getName()), notNullConstraints);
                   break;
                 case HiveParser.TOK_UNIQUE:
-                  processUniqueConstraints(qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
+                  processUniqueConstraints(catName, qualifiedTabName[0], 
qualifiedTabName[1], constraintChild,
                           ImmutableList.of(col.getName()), uniqueConstraints);
                   break;
                 case HiveParser.TOK_PRIMARY_KEY:

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 74c271d..ed1c0ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -285,6 +285,9 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
     case HiveParser.TOK_ALTERTABLE: {
       ast = (ASTNode) input.getChild(1);
       String[] qualified = getQualifiedTableName((ASTNode) input.getChild(0));
+      // TODO CAT - for now always use the default catalog.  Eventually we 
will want to see if
+      // the user specified a catalog
+      String catName = MetaStoreUtils.getDefaultCatalog(conf);
       String tableName = getDotName(qualified);
       HashMap<String, String> partSpec = null;
       ASTNode partSpecNode = (ASTNode)input.getChild(2);
@@ -312,7 +315,7 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_REPLACECOLS) {
         analyzeAlterTableModifyCols(qualified, ast, partSpec, 
AlterTableTypes.REPLACECOLS);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_RENAMECOL) {
-        analyzeAlterTableRenameCol(qualified, ast, partSpec);
+        analyzeAlterTableRenameCol(catName, qualified, ast, partSpec);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_ADDPARTS) {
         analyzeAlterTableAddParts(qualified, ast, false);
       } else if (ast.getType() == HiveParser.TOK_ALTERTABLE_DROPPARTS) {
@@ -2152,6 +2155,9 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
     throws SemanticException {
     ASTNode parent = (ASTNode) ast.getParent();
     String[] qualifiedTabName = getQualifiedTableName((ASTNode) 
parent.getChild(0));
+    // TODO CAT - for now always use the default catalog.  Eventually we will 
want to see if
+    // the user specified a catalog
+    String catName = MetaStoreUtils.getDefaultCatalog(conf);
     ASTNode child = (ASTNode) ast.getChild(0);
     List<SQLPrimaryKey> primaryKeys = new ArrayList<>();
     List<SQLForeignKey> foreignKeys = new ArrayList<>();
@@ -2159,7 +2165,7 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
 
     switch (child.getToken().getType()) {
       case HiveParser.TOK_UNIQUE:
-        BaseSemanticAnalyzer.processUniqueConstraints(qualifiedTabName[0], 
qualifiedTabName[1],
+        BaseSemanticAnalyzer.processUniqueConstraints(catName, 
qualifiedTabName[0], qualifiedTabName[1],
                 child, uniqueConstraints);
         break;
       case HiveParser.TOK_PRIMARY_KEY:
@@ -3078,7 +3084,7 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
         alterTblDesc)));
   }
 
-  private void analyzeAlterTableRenameCol(String[] qualified, ASTNode ast,
+  private void analyzeAlterTableRenameCol(String catName, String[] qualified, 
ASTNode ast,
       HashMap<String, String> partSpec) throws SemanticException {
     String newComment = null;
     boolean first = false;
@@ -3122,23 +3128,23 @@ public class DDLSemanticAnalyzer extends 
BaseSemanticAnalyzer {
       switch (constraintChild.getToken().getType()) {
       case HiveParser.TOK_CHECK_CONSTRAINT:
         checkConstraints = new ArrayList<>();
-        processCheckConstraints(qualified[0], qualified[1], constraintChild,
+        processCheckConstraints(catName, qualified[0], qualified[1], 
constraintChild,
                                   ImmutableList.of(newColName), 
checkConstraints, (ASTNode)ast.getChild(2),
                                 this.ctx.getTokenRewriteStream());
         break;
       case HiveParser.TOK_DEFAULT_VALUE:
         defaultConstraints = new ArrayList<>();
-        processDefaultConstraints(qualified[0], qualified[1], constraintChild,
+        processDefaultConstraints(catName, qualified[0], qualified[1], 
constraintChild,
                                   ImmutableList.of(newColName), 
defaultConstraints, (ASTNode)ast.getChild(2));
         break;
       case HiveParser.TOK_NOT_NULL:
         notNullConstraints = new ArrayList<>();
-        processNotNullConstraints(qualified[0], qualified[1], constraintChild,
+        processNotNullConstraints(catName, qualified[0], qualified[1], 
constraintChild,
                                   ImmutableList.of(newColName), 
notNullConstraints);
         break;
       case HiveParser.TOK_UNIQUE:
         uniqueConstraints = new ArrayList<>();
-        processUniqueConstraints(qualified[0], qualified[1], constraintChild,
+        processUniqueConstraints(catName, qualified[0], qualified[1], 
constraintChild,
                                  ImmutableList.of(newColName), 
uniqueConstraints);
         break;
       case HiveParser.TOK_PRIMARY_KEY:

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index 762e438..88b6068 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -86,7 +86,7 @@ public class MacroSemanticAnalyzer extends 
BaseSemanticAnalyzer {
     }
 
     List<FieldSchema> arguments =
-      BaseSemanticAnalyzer.getColumns((ASTNode)ast.getChild(1), true);
+      BaseSemanticAnalyzer.getColumns((ASTNode)ast.getChild(1), true, conf);
     boolean isNoArgumentMacro = arguments.size() == 0;
     RowResolver rowResolver = new RowResolver();
     ArrayList<String> macroColNames = new ArrayList<String>(arguments.size());

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 26f20f2..53d5a12 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -10957,7 +10957,6 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
         if (partitions != null) {
           for (Partition partn : partitions) {
             // inputs.add(new ReadEntity(partn)); // is this needed at all?
-            LOG.info("XXX: adding part: "+partn);
             outputs.add(new WriteEntity(partn, 
WriteEntity.WriteType.DDL_NO_LOCK));
           }
         }
@@ -12723,14 +12722,14 @@ public class SemanticAnalyzer extends 
BaseSemanticAnalyzer {
         break;
       case HiveParser.TOK_TABCOLLIST:
         cols = getColumns(child, true, ctx.getTokenRewriteStream(), 
primaryKeys, foreignKeys,
-            uniqueConstraints, notNullConstraints, defaultConstraints, 
checkConstraints);
+            uniqueConstraints, notNullConstraints, defaultConstraints, 
checkConstraints, conf);
         break;
       case HiveParser.TOK_TABLECOMMENT:
         comment = unescapeSQLString(child.getChild(0).getText());
         break;
       case HiveParser.TOK_TABLEPARTCOLS:
-        partCols = getColumns(child, false, 
ctx.getTokenRewriteStream(),primaryKeys, foreignKeys,
-            uniqueConstraints, notNullConstraints, defaultConstraints, 
checkConstraints);
+        partCols = getColumns(child, false, ctx.getTokenRewriteStream(), 
primaryKeys, foreignKeys,
+            uniqueConstraints, notNullConstraints, defaultConstraints, 
checkConstraints, conf);
         if(hasConstraints(partCols, defaultConstraints, notNullConstraints, 
checkConstraints)) {
           //TODO: these constraints should be supported for partition columns
           throw new SemanticException(

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
index 2c7064b..8a7c06d 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProviderBase.java
@@ -21,6 +21,8 @@ package org.apache.hadoop.hive.ql.security.authorization;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.IHMSHandler;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -89,7 +91,7 @@ public abstract class HiveAuthorizationProviderBase implements
         return Hive.getWithFastCheck(conf).getDatabase(dbName);
       } else {
         try {
-          return handler.get_database_core(dbName);
+          return 
handler.get_database_core(MetaStoreUtils.getDefaultCatalog(conf), dbName);
         } catch (NoSuchObjectException e) {
           throw new HiveException(e);
         } catch (MetaException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
index 233a48c..ca4b667 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java
@@ -43,7 +43,8 @@ public class AuthorizationMetaStoreFilterHook extends 
DefaultMetaStoreFilterHook
   }
 
   @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) 
throws MetaException {
+  public List<String> filterTableNames(String catName, String dbName, 
List<String> tableList)
+      throws MetaException {
     List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList);
     return getTableNames(getFilteredObjects(listObjs));
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java 
b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
index ade7726..dd0929f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
@@ -46,6 +46,8 @@ import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import static 
org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
+
 /**
  * Superclass for all threads in the compactor.
  */
@@ -102,7 +104,7 @@ abstract class CompactorThread extends Thread implements 
MetaStoreThread {
    */
   protected Table resolveTable(CompactionInfo ci) throws MetaException {
     try {
-      return rs.getTable(ci.dbname, ci.tableName);
+      return rs.getTable(getDefaultCatalog(conf), ci.dbname, ci.tableName);
     } catch (MetaException e) {
       LOG.error("Unable to find table " + ci.getFullTableName() + ", " + 
e.getMessage());
       throw e;
@@ -120,7 +122,7 @@ abstract class CompactorThread extends Thread implements 
MetaStoreThread {
     if (ci.partName != null) {
       List<Partition> parts;
       try {
-        parts = rs.getPartitionsByNames(ci.dbname, ci.tableName,
+        parts = rs.getPartitionsByNames(getDefaultCatalog(conf), ci.dbname, 
ci.tableName,
             Collections.singletonList(ci.partName));
         if (parts == null || parts.size() == 0) {
           // The partition got dropped before we went looking for it.

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out 
b/ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out
index 30ad841..acf65f2 100644
--- 
a/ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out
+++ 
b/ql/src/test/results/clientnegative/alter_table_constraint_duplicate_pk.q.out
@@ -8,4 +8,4 @@ POSTHOOK: Output: database:default
 POSTHOOK: Output: default@table1
 PREHOOK: query: alter table table1 add constraint pk4 primary key (b) disable 
novalidate rely
 PREHOOK: type: ALTERTABLE_ADDCONSTRAINT
-FAILED: Execution Error, return code 1 from 
org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message: Primary key 
already exists for: default.table1)
+FAILED: Execution Error, return code 1 from 
org.apache.hadoop.hive.ql.exec.DDLTask. MetaException(message: Primary key 
already exists for: hive.default.table1)

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/ql/src/test/results/clientnegative/create_view_failure2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/create_view_failure2.q.out 
b/ql/src/test/results/clientnegative/create_view_failure2.q.out
index 3342a23..ad5d5fe 100644
--- a/ql/src/test/results/clientnegative/create_view_failure2.q.out
+++ b/ql/src/test/results/clientnegative/create_view_failure2.q.out
@@ -17,4 +17,4 @@ PREHOOK: query: CREATE TABLE xxx4(key int)
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: database:default
 PREHOOK: Output: default@xxx4
-FAILED: Execution Error, return code 1 from 
org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Table 
xxx4 already exists)
+FAILED: Execution Error, return code 1 from 
org.apache.hadoop.hive.ql.exec.DDLTask. AlreadyExistsException(message:Table 
hive.default.xxx4 already exists)

Reply via email to