http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
index aa93158..969f4ab 100644
--- a/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
+++ b/standalone-metastore/src/gen/thrift/gen-rb/hive_metastore_types.rb
@@ -231,6 +231,7 @@ class SQLPrimaryKey
   ENABLE_CSTR = 6
   VALIDATE_CSTR = 7
   RELY_CSTR = 8
+  CATNAME = 9
 
   FIELDS = {
     TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
@@ -240,7 +241,8 @@ class SQLPrimaryKey
     PK_NAME => {:type => ::Thrift::Types::STRING, :name => 'pk_name'},
     ENABLE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'enable_cstr'},
    VALIDATE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'validate_cstr'},
-    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'}
+    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -267,6 +269,7 @@ class SQLForeignKey
   ENABLE_CSTR = 12
   VALIDATE_CSTR = 13
   RELY_CSTR = 14
+  CATNAME = 15
 
   FIELDS = {
     PKTABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'pktable_db'},
@@ -282,7 +285,8 @@ class SQLForeignKey
     PK_NAME => {:type => ::Thrift::Types::STRING, :name => 'pk_name'},
     ENABLE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'enable_cstr'},
    VALIDATE_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'validate_cstr'},
-    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'}
+    RELY_CSTR => {:type => ::Thrift::Types::BOOL, :name => 'rely_cstr'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -295,16 +299,18 @@ end
 
 class SQLUniqueConstraint
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  TABLE_DB = 1
-  TABLE_NAME = 2
-  COLUMN_NAME = 3
-  KEY_SEQ = 4
-  UK_NAME = 5
-  ENABLE_CSTR = 6
-  VALIDATE_CSTR = 7
-  RELY_CSTR = 8
+  CATNAME = 1
+  TABLE_DB = 2
+  TABLE_NAME = 3
+  COLUMN_NAME = 4
+  KEY_SEQ = 5
+  UK_NAME = 6
+  ENABLE_CSTR = 7
+  VALIDATE_CSTR = 8
+  RELY_CSTR = 9
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
     TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
     COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
@@ -325,15 +331,17 @@ end
 
 class SQLNotNullConstraint
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  TABLE_DB = 1
-  TABLE_NAME = 2
-  COLUMN_NAME = 3
-  NN_NAME = 4
-  ENABLE_CSTR = 5
-  VALIDATE_CSTR = 6
-  RELY_CSTR = 7
+  CATNAME = 1
+  TABLE_DB = 2
+  TABLE_NAME = 3
+  COLUMN_NAME = 4
+  NN_NAME = 5
+  ENABLE_CSTR = 6
+  VALIDATE_CSTR = 7
+  RELY_CSTR = 8
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
     TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
     COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
@@ -353,16 +361,18 @@ end
 
 class SQLDefaultConstraint
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  TABLE_DB = 1
-  TABLE_NAME = 2
-  COLUMN_NAME = 3
-  DEFAULT_VALUE = 4
-  DC_NAME = 5
-  ENABLE_CSTR = 6
-  VALIDATE_CSTR = 7
-  RELY_CSTR = 8
+  CATNAME = 1
+  TABLE_DB = 2
+  TABLE_NAME = 3
+  COLUMN_NAME = 4
+  DEFAULT_VALUE = 5
+  DC_NAME = 6
+  ENABLE_CSTR = 7
+  VALIDATE_CSTR = 8
+  RELY_CSTR = 9
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
     TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
     COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
@@ -383,16 +393,18 @@ end
 
 class SQLCheckConstraint
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  TABLE_DB = 1
-  TABLE_NAME = 2
-  COLUMN_NAME = 3
-  CHECK_EXPRESSION = 4
-  DC_NAME = 5
-  ENABLE_CSTR = 6
-  VALIDATE_CSTR = 7
-  RELY_CSTR = 8
+  CATNAME = 1
+  TABLE_DB = 2
+  TABLE_NAME = 3
+  COLUMN_NAME = 4
+  CHECK_EXPRESSION = 5
+  DC_NAME = 6
+  ENABLE_CSTR = 7
+  VALIDATE_CSTR = 8
+  RELY_CSTR = 9
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     TABLE_DB => {:type => ::Thrift::Types::STRING, :name => 'table_db'},
     TABLE_NAME => {:type => ::Thrift::Types::STRING, :name => 'table_name'},
     COLUMN_NAME => {:type => ::Thrift::Types::STRING, :name => 'column_name'},
@@ -440,13 +452,15 @@ class HiveObjectRef
   OBJECTNAME = 3
   PARTVALUES = 4
   COLUMNNAME = 5
+  CATNAME = 6
 
   FIELDS = {
    OBJECTTYPE => {:type => ::Thrift::Types::I32, :name => 'objectType', :enum_class => ::HiveObjectType},
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     OBJECTNAME => {:type => ::Thrift::Types::STRING, :name => 'objectName'},
    PARTVALUES => {:type => ::Thrift::Types::LIST, :name => 'partValues', :element => {:type => ::Thrift::Types::STRING}},
-    COLUMNNAME => {:type => ::Thrift::Types::STRING, :name => 'columnName'}
+    COLUMNNAME => {:type => ::Thrift::Types::STRING, :name => 'columnName'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -768,6 +782,106 @@ class GrantRevokeRoleResponse
   ::Thrift::Struct.generate_accessors self
 end
 
+class Catalog
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NAME = 1
+  DESCRIPTION = 2
+  LOCATIONURI = 3
+
+  FIELDS = {
+    NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
+    DESCRIPTION => {:type => ::Thrift::Types::STRING, :name => 'description', :optional => true},
+    LOCATIONURI => {:type => ::Thrift::Types::STRING, :name => 'locationUri'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class CreateCatalogRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  CATALOG = 1
+
+  FIELDS = {
+    CATALOG => {:type => ::Thrift::Types::STRUCT, :name => 'catalog', :class => ::Catalog}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetCatalogRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NAME = 1
+
+  FIELDS = {
+    NAME => {:type => ::Thrift::Types::STRING, :name => 'name'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetCatalogResponse
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  CATALOG = 1
+
+  FIELDS = {
+    CATALOG => {:type => ::Thrift::Types::STRUCT, :name => 'catalog', :class => ::Catalog}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class GetCatalogsResponse
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NAMES = 1
+
+  FIELDS = {
+    NAMES => {:type => ::Thrift::Types::LIST, :name => 'names', :element => {:type => ::Thrift::Types::STRING}}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
+class DropCatalogRequest
+  include ::Thrift::Struct, ::Thrift::Struct_Union
+  NAME = 1
+
+  FIELDS = {
+    NAME => {:type => ::Thrift::Types::STRING, :name => 'name'}
+  }
+
+  def struct_fields; FIELDS; end
+
+  def validate
+  end
+
+  ::Thrift::Struct.generate_accessors self
+end
+
 class Database
   include ::Thrift::Struct, ::Thrift::Struct_Union
   NAME = 1
@@ -777,6 +891,7 @@ class Database
   PRIVILEGES = 5
   OWNERNAME = 6
   OWNERTYPE = 7
+  CATALOGNAME = 8
 
   FIELDS = {
     NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
@@ -785,7 +900,8 @@ class Database
    PARAMETERS => {:type => ::Thrift::Types::MAP, :name => 'parameters', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::STRING}},
    PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true},
    OWNERNAME => {:type => ::Thrift::Types::STRING, :name => 'ownerName', :optional => true},
-    OWNERTYPE => {:type => ::Thrift::Types::I32, :name => 'ownerType', :optional => true, :enum_class => ::PrincipalType}
+    OWNERTYPE => {:type => ::Thrift::Types::I32, :name => 'ownerType', :optional => true, :enum_class => ::PrincipalType},
+    CATALOGNAME => {:type => ::Thrift::Types::STRING, :name => 'catalogName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -924,6 +1040,7 @@ class Table
   TEMPORARY = 14
   REWRITEENABLED = 15
   CREATIONMETADATA = 16
+  CATNAME = 17
 
   FIELDS = {
     TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName'},
@@ -941,7 +1058,8 @@ class Table
    PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true},
    TEMPORARY => {:type => ::Thrift::Types::BOOL, :name => 'temporary', :default => false, :optional => true},
    REWRITEENABLED => {:type => ::Thrift::Types::BOOL, :name => 'rewriteEnabled', :optional => true},
-    CREATIONMETADATA => {:type => ::Thrift::Types::STRUCT, :name => 'creationMetadata', :class => ::CreationMetadata, :optional => true}
+    CREATIONMETADATA => {:type => ::Thrift::Types::STRUCT, :name => 'creationMetadata', :class => ::CreationMetadata, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -962,6 +1080,7 @@ class Partition
   SD = 6
   PARAMETERS = 7
   PRIVILEGES = 8
+  CATNAME = 9
 
   FIELDS = {
    VALUES => {:type => ::Thrift::Types::LIST, :name => 'values', :element => {:type => ::Thrift::Types::STRING}},
@@ -971,7 +1090,8 @@ class Partition
    LASTACCESSTIME => {:type => ::Thrift::Types::I32, :name => 'lastAccessTime'},
    SD => {:type => ::Thrift::Types::STRUCT, :name => 'sd', :class => ::StorageDescriptor},
    PARAMETERS => {:type => ::Thrift::Types::MAP, :name => 'parameters', :key => {:type => ::Thrift::Types::STRING}, :value => {:type => ::Thrift::Types::STRING}},
-    PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true}
+    PRIVILEGES => {:type => ::Thrift::Types::STRUCT, :name => 'privileges', :class => ::PrincipalPrivilegeSet, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1049,13 +1169,15 @@ class PartitionSpec
   ROOTPATH = 3
   SHAREDSDPARTITIONSPEC = 4
   PARTITIONLIST = 5
+  CATNAME = 6
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName'},
     ROOTPATH => {:type => ::Thrift::Types::STRING, :name => 'rootPath'},
    SHAREDSDPARTITIONSPEC => {:type => ::Thrift::Types::STRUCT, :name => 'sharedSDPartitionSpec', :class => ::PartitionSpecWithSharedSD, :optional => true},
-    PARTITIONLIST => {:type => ::Thrift::Types::STRUCT, :name => 'partitionList', :class => ::PartitionListComposingSpec, :optional => true}
+    PARTITIONLIST => {:type => ::Thrift::Types::STRUCT, :name => 'partitionList', :class => ::PartitionListComposingSpec, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1374,13 +1496,15 @@ class ColumnStatisticsDesc
   TABLENAME = 3
   PARTNAME = 4
   LASTANALYZED = 5
+  CATNAME = 6
 
   FIELDS = {
     ISTBLLEVEL => {:type => ::Thrift::Types::BOOL, :name => 'isTblLevel'},
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName'},
    PARTNAME => {:type => ::Thrift::Types::STRING, :name => 'partName', :optional => true},
-    LASTANALYZED => {:type => ::Thrift::Types::I64, :name => 'lastAnalyzed', :optional => true}
+    LASTANALYZED => {:type => ::Thrift::Types::I64, :name => 'lastAnalyzed', :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1491,10 +1615,12 @@ class PrimaryKeysRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
   DB_NAME = 1
   TBL_NAME = 2
+  CATNAME = 3
 
   FIELDS = {
     DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
-    TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
+    TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1530,12 +1656,14 @@ class ForeignKeysRequest
   PARENT_TBL_NAME = 2
   FOREIGN_DB_NAME = 3
   FOREIGN_TBL_NAME = 4
+  CATNAME = 5
 
   FIELDS = {
    PARENT_DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'parent_db_name'},
    PARENT_TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'parent_tbl_name'},
    FOREIGN_DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'foreign_db_name'},
-    FOREIGN_TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'foreign_tbl_name'}
+    FOREIGN_TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'foreign_tbl_name'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1565,10 +1693,12 @@ end
 
 class UniqueConstraintsRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DB_NAME = 1
-  TBL_NAME = 2
+  CATNAME = 1
+  DB_NAME = 2
+  TBL_NAME = 3
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
     TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
   }
@@ -1576,6 +1706,7 @@ class UniqueConstraintsRequest
   def struct_fields; FIELDS; end
 
   def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field catName is unset!') unless @catName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
   end
@@ -1602,10 +1733,12 @@ end
 
 class NotNullConstraintsRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DB_NAME = 1
-  TBL_NAME = 2
+  CATNAME = 1
+  DB_NAME = 2
+  TBL_NAME = 3
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
     TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
   }
@@ -1613,6 +1746,7 @@ class NotNullConstraintsRequest
   def struct_fields; FIELDS; end
 
   def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field catName is unset!') unless @catName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
   end
@@ -1639,10 +1773,12 @@ end
 
 class DefaultConstraintsRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DB_NAME = 1
-  TBL_NAME = 2
+  CATNAME = 1
+  DB_NAME = 2
+  TBL_NAME = 3
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
     TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
   }
@@ -1650,6 +1786,7 @@ class DefaultConstraintsRequest
   def struct_fields; FIELDS; end
 
   def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field catName is unset!') unless @catName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
   end
@@ -1676,10 +1813,12 @@ end
 
 class CheckConstraintsRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DB_NAME = 1
-  TBL_NAME = 2
+  CATNAME = 1
+  DB_NAME = 2
+  TBL_NAME = 3
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DB_NAME => {:type => ::Thrift::Types::STRING, :name => 'db_name'},
     TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'}
   }
@@ -1687,6 +1826,7 @@ class CheckConstraintsRequest
   def struct_fields; FIELDS; end
 
   def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field catName is unset!') unless @catName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field db_name is unset!') unless @db_name
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tbl_name is unset!') unless @tbl_name
   end
@@ -1716,11 +1856,13 @@ class DropConstraintRequest
   DBNAME = 1
   TABLENAME = 2
   CONSTRAINTNAME = 3
+  CATNAME = 4
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbname'},
     TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tablename'},
-    CONSTRAINTNAME => {:type => ::Thrift::Types::STRING, :name => 'constraintname'}
+    CONSTRAINTNAME => {:type => ::Thrift::Types::STRING, :name => 'constraintname'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1863,13 +2005,15 @@ class PartitionsByExprRequest
   EXPR = 3
   DEFAULTPARTITIONNAME = 4
   MAXPARTS = 5
+  CATNAME = 6
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
    EXPR => {:type => ::Thrift::Types::STRING, :name => 'expr', :binary => true},
    DEFAULTPARTITIONNAME => {:type => ::Thrift::Types::STRING, :name => 'defaultPartitionName', :optional => true},
-    MAXPARTS => {:type => ::Thrift::Types::I16, :name => 'maxParts', :default => -1, :optional => true}
+    MAXPARTS => {:type => ::Thrift::Types::I16, :name => 'maxParts', :default => -1, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1922,11 +2066,13 @@ class TableStatsRequest
   DBNAME = 1
   TBLNAME = 2
   COLNAMES = 3
+  CATNAME = 4
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
-    COLNAMES => {:type => ::Thrift::Types::LIST, :name => 'colNames', :element => {:type => ::Thrift::Types::STRING}}
+    COLNAMES => {:type => ::Thrift::Types::LIST, :name => 'colNames', :element => {:type => ::Thrift::Types::STRING}},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1946,12 +2092,14 @@ class PartitionsStatsRequest
   TBLNAME = 2
   COLNAMES = 3
   PARTNAMES = 4
+  CATNAME = 5
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
    COLNAMES => {:type => ::Thrift::Types::LIST, :name => 'colNames', :element => {:type => ::Thrift::Types::STRING}},
-    PARTNAMES => {:type => ::Thrift::Types::LIST, :name => 'partNames', :element => {:type => ::Thrift::Types::STRING}}
+    PARTNAMES => {:type => ::Thrift::Types::LIST, :name => 'partNames', :element => {:type => ::Thrift::Types::STRING}},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -1989,13 +2137,15 @@ class AddPartitionsRequest
   PARTS = 3
   IFNOTEXISTS = 4
   NEEDRESULT = 5
+  CATNAME = 6
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
    PARTS => {:type => ::Thrift::Types::LIST, :name => 'parts', :element => {:type => ::Thrift::Types::STRUCT, :class => ::Partition}},
    IFNOTEXISTS => {:type => ::Thrift::Types::BOOL, :name => 'ifNotExists'},
-    NEEDRESULT => {:type => ::Thrift::Types::BOOL, :name => 'needResult', :default => true, :optional => true}
+    NEEDRESULT => {:type => ::Thrift::Types::BOOL, :name => 'needResult', :default => true, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -2084,6 +2234,7 @@ class DropPartitionsRequest
   IGNOREPROTECTION = 6
   ENVIRONMENTCONTEXT = 7
   NEEDRESULT = 8
+  CATNAME = 9
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
@@ -2093,7 +2244,8 @@ class DropPartitionsRequest
    IFEXISTS => {:type => ::Thrift::Types::BOOL, :name => 'ifExists', :default => true, :optional => true},
    IGNOREPROTECTION => {:type => ::Thrift::Types::BOOL, :name => 'ignoreProtection', :optional => true},
    ENVIRONMENTCONTEXT => {:type => ::Thrift::Types::STRUCT, :name => 'environmentContext', :class => ::EnvironmentContext, :optional => true},
-    NEEDRESULT => {:type => ::Thrift::Types::BOOL, :name => 'needResult', :default => true, :optional => true}
+    NEEDRESULT => {:type => ::Thrift::Types::BOOL, :name => 'needResult', :default => true, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -2117,6 +2269,7 @@ class PartitionValuesRequest
   PARTITIONORDER = 6
   ASCENDING = 7
   MAXPARTS = 8
+  CATNAME = 9
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
@@ -2126,7 +2279,8 @@ class PartitionValuesRequest
    FILTER => {:type => ::Thrift::Types::STRING, :name => 'filter', :optional => true},
    PARTITIONORDER => {:type => ::Thrift::Types::LIST, :name => 'partitionOrder', :element => {:type => ::Thrift::Types::STRUCT, :class => ::FieldSchema}, :optional => true},
    ASCENDING => {:type => ::Thrift::Types::BOOL, :name => 'ascending', :default => true, :optional => true},
-    MAXPARTS => {:type => ::Thrift::Types::I64, :name => 'maxParts', :default => -1, :optional => true}
+    MAXPARTS => {:type => ::Thrift::Types::I64, :name => 'maxParts', :default => -1, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -2205,6 +2359,7 @@ class Function
   CREATETIME = 6
   FUNCTIONTYPE = 7
   RESOURCEURIS = 8
+  CATNAME = 9
 
   FIELDS = {
    FUNCTIONNAME => {:type => ::Thrift::Types::STRING, :name => 'functionName'},
@@ -2214,7 +2369,8 @@ class Function
    OWNERTYPE => {:type => ::Thrift::Types::I32, :name => 'ownerType', :enum_class => ::PrincipalType},
    CREATETIME => {:type => ::Thrift::Types::I32, :name => 'createTime'},
    FUNCTIONTYPE => {:type => ::Thrift::Types::I32, :name => 'functionType', :enum_class => ::FunctionType},
-    RESOURCEURIS => {:type => ::Thrift::Types::LIST, :name => 'resourceUris', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ResourceUri}}
+    RESOURCEURIS => {:type => ::Thrift::Types::LIST, :name => 'resourceUris', :element => {:type => ::Thrift::Types::STRUCT, :class => ::ResourceUri}},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3015,12 +3171,14 @@ end
 
 class CreationMetadata
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DBNAME = 1
-  TBLNAME = 2
-  TABLESUSED = 3
-  VALIDTXNLIST = 4
+  CATNAME = 1
+  DBNAME = 2
+  TBLNAME = 3
+  TABLESUSED = 4
+  VALIDTXNLIST = 5
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
    TABLESUSED => {:type => ::Thrift::Types::SET, :name => 'tablesUsed', :element => {:type => ::Thrift::Types::STRING}},
@@ -3030,6 +3188,7 @@ class CreationMetadata
   def struct_fields; FIELDS; end
 
   def validate
+    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field catName is unset!') unless @catName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field dbName is unset!') unless @dbName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tblName is unset!') unless @tblName
    raise ::Thrift::ProtocolException.new(::Thrift::ProtocolException::UNKNOWN, 'Required field tablesUsed is unset!') unless @tablesUsed
@@ -3066,6 +3225,7 @@ class NotificationEvent
   TABLENAME = 5
   MESSAGE = 6
   MESSAGEFORMAT = 7
+  CATNAME = 8
 
   FIELDS = {
     EVENTID => {:type => ::Thrift::Types::I64, :name => 'eventId'},
@@ -3074,7 +3234,8 @@ class NotificationEvent
    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName', :optional => true},
    TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName', :optional => true},
    MESSAGE => {:type => ::Thrift::Types::STRING, :name => 'message'},
-    MESSAGEFORMAT => {:type => ::Thrift::Types::STRING, :name => 'messageFormat', :optional => true}
+    MESSAGEFORMAT => {:type => ::Thrift::Types::STRING, :name => 'messageFormat', :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3127,10 +3288,12 @@ class NotificationEventsCountRequest
   include ::Thrift::Struct, ::Thrift::Struct_Union
   FROMEVENTID = 1
   DBNAME = 2
+  CATNAME = 3
 
   FIELDS = {
     FROMEVENTID => {:type => ::Thrift::Types::I64, :name => 'fromEventId'},
-    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'}
+    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3211,13 +3374,15 @@ class FireEventRequest
   DBNAME = 3
   TABLENAME = 4
   PARTITIONVALS = 5
+  CATNAME = 6
 
   FIELDS = {
     SUCCESSFUL => {:type => ::Thrift::Types::BOOL, :name => 'successful'},
    DATA => {:type => ::Thrift::Types::STRUCT, :name => 'data', :class => ::FireEventRequestData},
    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName', :optional => true},
    TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName', :optional => true},
-    PARTITIONVALS => {:type => ::Thrift::Types::LIST, :name => 'partitionVals', :element => {:type => ::Thrift::Types::STRING}, :optional => true}
+    PARTITIONVALS => {:type => ::Thrift::Types::LIST, :name => 'partitionVals', :element => {:type => ::Thrift::Types::STRING}, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3498,11 +3663,13 @@ class GetTableRequest
   DBNAME = 1
   TBLNAME = 2
   CAPABILITIES = 3
+  CATNAME = 4
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TBLNAME => {:type => ::Thrift::Types::STRING, :name => 'tblName'},
-    CAPABILITIES => {:type => ::Thrift::Types::STRUCT, :name => 'capabilities', :class => ::ClientCapabilities, :optional => true}
+    CAPABILITIES => {:type => ::Thrift::Types::STRUCT, :name => 'capabilities', :class => ::ClientCapabilities, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3537,11 +3704,13 @@ class GetTablesRequest
   DBNAME = 1
   TBLNAMES = 2
   CAPABILITIES = 3
+  CATNAME = 4
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
    TBLNAMES => {:type => ::Thrift::Types::LIST, :name => 'tblNames', :element => {:type => ::Thrift::Types::STRING}, :optional => true},
-    CAPABILITIES => {:type => ::Thrift::Types::STRUCT, :name => 'capabilities', :class => ::ClientCapabilities, :optional => true}
+    CAPABILITIES => {:type => ::Thrift::Types::STRUCT, :name => 'capabilities', :class => ::ClientCapabilities, :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -3611,12 +3780,14 @@ class TableMeta
   TABLENAME = 2
   TABLETYPE = 3
   COMMENTS = 4
+  CATNAME = 5
 
   FIELDS = {
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     TABLENAME => {:type => ::Thrift::Types::STRING, :name => 'tableName'},
     TABLETYPE => {:type => ::Thrift::Types::STRING, :name => 'tableType'},
-    COMMENTS => {:type => ::Thrift::Types::STRING, :name => 'comments', :optional => true}
+    COMMENTS => {:type => ::Thrift::Types::STRING, :name => 'comments', :optional => true},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName', :optional => true}
   }
 
   def struct_fields; FIELDS; end
@@ -4421,16 +4592,18 @@ class ISchema
   include ::Thrift::Struct, ::Thrift::Struct_Union
   SCHEMATYPE = 1
   NAME = 2
-  DBNAME = 3
-  COMPATIBILITY = 4
-  VALIDATIONLEVEL = 5
-  CANEVOLVE = 6
-  SCHEMAGROUP = 7
-  DESCRIPTION = 8
+  CATNAME = 3
+  DBNAME = 4
+  COMPATIBILITY = 5
+  VALIDATIONLEVEL = 6
+  CANEVOLVE = 7
+  SCHEMAGROUP = 8
+  DESCRIPTION = 9
 
   FIELDS = {
    SCHEMATYPE => {:type => ::Thrift::Types::I32, :name => 'schemaType', :enum_class => ::SchemaType},
    NAME => {:type => ::Thrift::Types::STRING, :name => 'name'},
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
    DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
    COMPATIBILITY => {:type => ::Thrift::Types::I32, :name => 'compatibility', :enum_class => ::SchemaCompatibility},
    VALIDATIONLEVEL => {:type => ::Thrift::Types::I32, :name => 'validationLevel', :enum_class => ::SchemaValidation},
@@ -4458,10 +4631,12 @@ end
 
 class ISchemaName
   include ::Thrift::Struct, ::Thrift::Struct_Union
-  DBNAME = 1
-  SCHEMANAME = 2
+  CATNAME = 1
+  DBNAME = 2
+  SCHEMANAME = 3
 
   FIELDS = {
+    CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
     DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbName'},
     SCHEMANAME => {:type => ::Thrift::Types::STRING, :name => 'schemaName'}
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb b/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
index 7a07b73..c103675 100644
--- a/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
+++ b/standalone-metastore/src/gen/thrift/gen-rb/thrift_hive_metastore.rb
@@ -43,6 +43,73 @@ module ThriftHiveMetastore
       return
     end
 
+    def create_catalog(catalog)
+      send_create_catalog(catalog)
+      recv_create_catalog()
+    end
+
+    def send_create_catalog(catalog)
+      send_message('create_catalog', Create_catalog_args, :catalog => catalog)
+    end
+
+    def recv_create_catalog()
+      result = receive_message(Create_catalog_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise result.o3 unless result.o3.nil?
+      return
+    end
+
+    def get_catalog(catName)
+      send_get_catalog(catName)
+      return recv_get_catalog()
+    end
+
+    def send_get_catalog(catName)
+      send_message('get_catalog', Get_catalog_args, :catName => catName)
+    end
+
+    def recv_get_catalog()
+      result = receive_message(Get_catalog_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_catalog failed: unknown result')
+    end
+
+    def get_catalogs()
+      send_get_catalogs()
+      return recv_get_catalogs()
+    end
+
+    def send_get_catalogs()
+      send_message('get_catalogs', Get_catalogs_args)
+    end
+
+    def recv_get_catalogs()
+      result = receive_message(Get_catalogs_result)
+      return result.success unless result.success.nil?
+      raise result.o1 unless result.o1.nil?
+      raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_catalogs failed: unknown result')
+    end
+
+    def drop_catalog(catName)
+      send_drop_catalog(catName)
+      recv_drop_catalog()
+    end
+
+    def send_drop_catalog(catName)
+      send_message('drop_catalog', Drop_catalog_args, :catName => catName)
+    end
+
+    def recv_drop_catalog()
+      result = receive_message(Drop_catalog_result)
+      raise result.o1 unless result.o1.nil?
+      raise result.o2 unless result.o2.nil?
+      raise result.o3 unless result.o3.nil?
+      return
+    end
+
     def create_database(database)
       send_create_database(database)
       recv_create_database()
@@ -660,13 +727,13 @@ module ThriftHiveMetastore
       raise ::Thrift::ApplicationException.new(::Thrift::ApplicationException::MISSING_RESULT, 'get_materialization_invalidation_info failed: unknown result')
     end
 
-    def update_creation_metadata(dbname, tbl_name, creation_metadata)
-      send_update_creation_metadata(dbname, tbl_name, creation_metadata)
+    def update_creation_metadata(catName, dbname, tbl_name, creation_metadata)
+      send_update_creation_metadata(catName, dbname, tbl_name, creation_metadata)
       recv_update_creation_metadata()
     end
 
-    def send_update_creation_metadata(dbname, tbl_name, creation_metadata)
-      send_message('update_creation_metadata', Update_creation_metadata_args, :dbname => dbname, :tbl_name => tbl_name, :creation_metadata => creation_metadata)
+    def send_update_creation_metadata(catName, dbname, tbl_name, creation_metadata)
+      send_message('update_creation_metadata', Update_creation_metadata_args, :catName => catName, :dbname => dbname, :tbl_name => tbl_name, :creation_metadata => creation_metadata)
     end
 
     def recv_update_creation_metadata()
@@ -3308,6 +3375,60 @@ module ThriftHiveMetastore
       write_result(result, oprot, 'setMetaConf', seqid)
     end
 
+    def process_create_catalog(seqid, iprot, oprot)
+      args = read_args(iprot, Create_catalog_args)
+      result = Create_catalog_result.new()
+      begin
+        @handler.create_catalog(args.catalog)
+      rescue ::AlreadyExistsException => o1
+        result.o1 = o1
+      rescue ::InvalidObjectException => o2
+        result.o2 = o2
+      rescue ::MetaException => o3
+        result.o3 = o3
+      end
+      write_result(result, oprot, 'create_catalog', seqid)
+    end
+
+    def process_get_catalog(seqid, iprot, oprot)
+      args = read_args(iprot, Get_catalog_args)
+      result = Get_catalog_result.new()
+      begin
+        result.success = @handler.get_catalog(args.catName)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::MetaException => o2
+        result.o2 = o2
+      end
+      write_result(result, oprot, 'get_catalog', seqid)
+    end
+
+    def process_get_catalogs(seqid, iprot, oprot)
+      args = read_args(iprot, Get_catalogs_args)
+      result = Get_catalogs_result.new()
+      begin
+        result.success = @handler.get_catalogs()
+      rescue ::MetaException => o1
+        result.o1 = o1
+      end
+      write_result(result, oprot, 'get_catalogs', seqid)
+    end
+
+    def process_drop_catalog(seqid, iprot, oprot)
+      args = read_args(iprot, Drop_catalog_args)
+      result = Drop_catalog_result.new()
+      begin
+        @handler.drop_catalog(args.catName)
+      rescue ::NoSuchObjectException => o1
+        result.o1 = o1
+      rescue ::InvalidOperationException => o2
+        result.o2 = o2
+      rescue ::MetaException => o3
+        result.o3 = o3
+      end
+      write_result(result, oprot, 'drop_catalog', seqid)
+    end
+
     def process_create_database(seqid, iprot, oprot)
       args = read_args(iprot, Create_database_args)
       result = Create_database_result.new()
@@ -3799,7 +3920,7 @@ module ThriftHiveMetastore
       args = read_args(iprot, Update_creation_metadata_args)
       result = Update_creation_metadata_result.new()
       begin
-        @handler.update_creation_metadata(args.dbname, args.tbl_name, args.creation_metadata)
+        @handler.update_creation_metadata(args.catName, args.dbname, args.tbl_name, args.creation_metadata)
       rescue ::MetaException => o1
         result.o1 = o1
       rescue ::InvalidOperationException => o2
@@ -5826,6 +5947,147 @@ module ThriftHiveMetastore
     ::Thrift::Struct.generate_accessors self
   end
 
+  class Create_catalog_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    CATALOG = 1
+
+    FIELDS = {
+      CATALOG => {:type => ::Thrift::Types::STRUCT, :name => 'catalog', :class => ::CreateCatalogRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Create_catalog_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+    O3 = 3
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::AlreadyExistsException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::InvalidObjectException},
+      O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_catalog_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    CATNAME = 1
+
+    FIELDS = {
+      CATNAME => {:type => ::Thrift::Types::STRUCT, :name => 'catName', :class => ::GetCatalogRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_catalog_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+    O2 = 2
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::GetCatalogResponse},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_catalogs_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+
+    FIELDS = {
+
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Get_catalogs_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    SUCCESS = 0
+    O1 = 1
+
+    FIELDS = {
+      SUCCESS => {:type => ::Thrift::Types::STRUCT, :name => 'success', :class => ::GetCatalogsResponse},
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Drop_catalog_args
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    CATNAME = 1
+
+    FIELDS = {
+      CATNAME => {:type => ::Thrift::Types::STRUCT, :name => 'catName', :class => ::DropCatalogRequest}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
+  class Drop_catalog_result
+    include ::Thrift::Struct, ::Thrift::Struct_Union
+    O1 = 1
+    O2 = 2
+    O3 = 3
+
+    FIELDS = {
+      O1 => {:type => ::Thrift::Types::STRUCT, :name => 'o1', :class => ::NoSuchObjectException},
+      O2 => {:type => ::Thrift::Types::STRUCT, :name => 'o2', :class => ::InvalidOperationException},
+      O3 => {:type => ::Thrift::Types::STRUCT, :name => 'o3', :class => ::MetaException}
+    }
+
+    def struct_fields; FIELDS; end
+
+    def validate
+    end
+
+    ::Thrift::Struct.generate_accessors self
+  end
+
   class Create_database_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
     DATABASE = 1
@@ -7197,11 +7459,13 @@ module ThriftHiveMetastore
 
   class Update_creation_metadata_args
     include ::Thrift::Struct, ::Thrift::Struct_Union
-    DBNAME = 1
-    TBL_NAME = 2
-    CREATION_METADATA = 3
+    CATNAME = 1
+    DBNAME = 2
+    TBL_NAME = 3
+    CREATION_METADATA = 4
 
     FIELDS = {
+      CATNAME => {:type => ::Thrift::Types::STRING, :name => 'catName'},
       DBNAME => {:type => ::Thrift::Types::STRING, :name => 'dbname'},
       TBL_NAME => {:type => ::Thrift::Types::STRING, :name => 'tbl_name'},
       CREATION_METADATA => {:type => ::Thrift::Types::STRUCT, :name => 
'creation_metadata', :class => ::CreationMetadata}

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
index bdac161..8e920bb 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AggregateStatsCache.java
@@ -152,16 +152,16 @@ public class AggregateStatsCache {
    * Return aggregate stats for a column from the cache or null.
    * While reading from the nodelist for a key, we wait maxReaderWaitTime to 
acquire the lock,
    * failing which we return a cache miss (i.e. null)
-   *
-   * @param dbName
-   * @param tblName
-   * @param colName
-   * @param partNames
-   * @return
+   * @param catName catalog name
+   * @param dbName database name
+   * @param tblName table name
+   * @param colName column name
+   * @param partNames list of partition names
+   * @return aggregated col stats
    */
-  public AggrColStats get(String dbName, String tblName, String colName, List<String> partNames) {
+  public AggrColStats get(String catName, String dbName, String tblName, String colName, List<String> partNames) {
     // Cache key
-    Key key = new Key(dbName, tblName, colName);
+    Key key = new Key(catName, dbName, tblName, colName);
     AggrColStatsList candidateList = cacheStore.get(key);
     // No key, or no nodes in candidate list
     if ((candidateList == null) || (candidateList.nodes.size() == 0)) {
@@ -267,23 +267,23 @@ public class AggregateStatsCache {
    * Add a new node to the cache; may trigger the cleaner thread if the cache 
is near full capacity.
    * We'll however add the node even if we temporaily exceed maxCacheNodes, 
because the cleaner
    * will eventually create space from expired nodes or by removing LRU nodes.
-   *
-   * @param dbName
-   * @param tblName
-   * @param colName
+   * @param catName catalog name
+   * @param dbName database name
+   * @param tblName table name
+   * @param colName column name
    * @param numPartsCached
    * @param colStats
    * @param bloomFilter
    */
   // TODO: make add asynchronous: add shouldn't block the higher level calls
-  public void add(String dbName, String tblName, String colName, long numPartsCached,
+  public void add(String catName, String dbName, String tblName, String colName, long numPartsCached,
       ColumnStatisticsObj colStats, BloomFilter bloomFilter) {
     // If we have no space in the cache, run cleaner thread
     if (getCurrentNodes() / maxCacheNodes > maxFull) {
       spawnCleaner();
     }
     // Cache key
-    Key key = new Key(dbName, tblName, colName);
+    Key key = new Key(catName, dbName, tblName, colName);
     // Add new node to the cache
     AggrColStats node = new AggrColStats(numPartsCached, bloomFilter, 
colStats);
     AggrColStatsList nodeList;
@@ -463,15 +463,17 @@ public class AggregateStatsCache {
    * Key object for the stats cache hashtable
    */
   static class Key {
+    private final String catName;
     private final String dbName;
     private final String tblName;
     private final String colName;
 
-    Key(String db, String table, String col) {
+    Key(String cat, String db, String table, String col) {
       // Don't construct an illegal cache key
-      if ((db == null) || (table == null) || (col == null)) {
-        throw new IllegalArgumentException("dbName, tblName, colName can't be null");
+      if (cat == null || (db == null) || (table == null) || (col == null)) {
+        throw new IllegalArgumentException("catName, dbName, tblName, colName can't be null");
       }
+      catName = cat;
       dbName = db;
       tblName = table;
       colName = col;
@@ -483,18 +485,20 @@ public class AggregateStatsCache {
         return false;
       }
       Key that = (Key) other;
-      return dbName.equals(that.dbName) && tblName.equals(that.tblName)
-          && colName.equals(that.colName);
+      return catName.equals(that.catName) && dbName.equals(that.dbName) &&
+          tblName.equals(that.tblName) && colName.equals(that.colName);
     }
 
     @Override
     public int hashCode() {
-      return dbName.hashCode() * 31 + tblName.hashCode() * 31 + colName.hashCode();
+      return catName.hashCode() * 31 + dbName.hashCode() * 31 + tblName.hashCode() * 31 +
+          colName.hashCode();
     }
 
     @Override
     public String toString() {
-      return "database:" + dbName + ", table:" + tblName + ", column:" + colName;
+      return "catalog: " + catName + ", database:" + dbName + ", table:" + tblName + ", column:" +
+          colName;
     }
 
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AlterHandler.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AlterHandler.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AlterHandler.java
index fc0b4d7..050dca9 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AlterHandler.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/AlterHandler.java
@@ -35,7 +35,7 @@ public interface AlterHandler extends Configurable {
 
   /**
    * @deprecated As of release 2.2.0. Replaced by {@link #alterTable(RawStore, 
Warehouse, String,
-   * String, Table, EnvironmentContext, IHMSHandler)}
+   * String, String, Table, EnvironmentContext, IHMSHandler)}
    *
    * handles alter table, the changes could be cascaded to partitions if 
applicable
    *
@@ -43,6 +43,8 @@ public interface AlterHandler extends Configurable {
    *          object to get metadata
    * @param wh
    *          Hive Warehouse where table data is stored
+   * @param catName
+   *          catalog of the table being altered
    * @param dbname
    *          database of the table being altered
    * @param name
@@ -56,9 +58,11 @@ public interface AlterHandler extends Configurable {
    *           thrown if there is any other error
    */
   @Deprecated
-  void alterTable(RawStore msdb, Warehouse wh, String dbname,
+  default void alterTable(RawStore msdb, Warehouse wh, String catName, String 
dbname,
     String name, Table newTable, EnvironmentContext envContext)
-      throws InvalidOperationException, MetaException;
+      throws InvalidOperationException, MetaException {
+    alterTable(msdb, wh, catName, dbname, name, newTable, envContext, null);
+  }
 
   /**
    * handles alter table, the changes could be cascaded to partitions if 
applicable
@@ -67,6 +71,7 @@ public interface AlterHandler extends Configurable {
    *          object to get metadata
    * @param wh
    *          Hive Warehouse where table data is stored
+   * @param catName catalog of the table being altered
    * @param dbname
    *          database of the table being altered
    * @param name
@@ -81,7 +86,7 @@ public interface AlterHandler extends Configurable {
    * @throws MetaException
    *           thrown if there is any other error
    */
-  void alterTable(RawStore msdb, Warehouse wh, String dbname,
+  void alterTable(RawStore msdb, Warehouse wh, String catName, String dbname,
       String name, Table newTable, EnvironmentContext envContext,
       IHMSHandler handler) throws InvalidOperationException, MetaException;
 
@@ -119,7 +124,8 @@ public interface AlterHandler extends Configurable {
    *
    * @param msdb
    *          object to get metadata
-   * @param wh
+   * @param wh physical warehouse class
+   * @param catName catalog name
    * @param dbname
    *          database of the partition being altered
    * @param name
@@ -136,14 +142,15 @@ public interface AlterHandler extends Configurable {
    * @throws AlreadyExistsException
    * @throws MetaException
    */
-  Partition alterPartition(final RawStore msdb, Warehouse wh, final String 
dbname,
-    final String name, final List<String> part_vals, final Partition new_part, 
EnvironmentContext environmentContext,
-    IHMSHandler handler)
+  Partition alterPartition(final RawStore msdb, Warehouse wh, final String 
catName,
+                           final String dbname, final String name, final 
List<String> part_vals,
+                           final Partition new_part, EnvironmentContext 
environmentContext,
+                           IHMSHandler handler)
       throws InvalidOperationException, InvalidObjectException, 
AlreadyExistsException, MetaException;
 
   /**
-   * @deprecated As of release 2.2.0. Replaced by {@link 
#alterPartitions(RawStore, Warehouse, String,
-   * String, List, EnvironmentContext, IHMSHandler)}
+   * @deprecated As of release 3.0.0. Replaced by {@link 
#alterPartitions(RawStore, Warehouse, String,
+   * String, String, List, EnvironmentContext, IHMSHandler)}
    *
    * handles alter partitions
    *
@@ -188,7 +195,7 @@ public interface AlterHandler extends Configurable {
    * @throws AlreadyExistsException
    * @throws MetaException
    */
-  List<Partition> alterPartitions(final RawStore msdb, Warehouse wh,
+  List<Partition> alterPartitions(final RawStore msdb, Warehouse wh, final 
String catName,
     final String dbname, final String name, final List<Partition> new_parts,
     EnvironmentContext environmentContext,IHMSHandler handler)
       throws InvalidOperationException, InvalidObjectException, 
AlreadyExistsException, MetaException;

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
index ca63333..4e1daba 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java
@@ -27,6 +27,7 @@ import 
org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PartitionSpec;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.TableMeta;
 
 /**
  * Default no-op implementation of the MetaStoreFilterHook that returns the 
result as is
@@ -47,11 +48,17 @@ public class DefaultMetaStoreFilterHookImpl implements 
MetaStoreFilterHook {
   }
 
   @Override
-  public List<String> filterTableNames(String dbName, List<String> tableList) 
throws MetaException {
+  public List<String> filterTableNames(String catName, String dbName, 
List<String> tableList)
+      throws MetaException {
     return tableList;
   }
 
   @Override
+  public List<TableMeta> filterTableMetas(List<TableMeta> tableMetas) throws 
MetaException {
+    return tableMetas;
+  }
+
+  @Override
   public Table filterTable(Table table)  throws NoSuchObjectException {
     return table;
   }
@@ -78,7 +85,7 @@ public class DefaultMetaStoreFilterHookImpl implements 
MetaStoreFilterHook {
   }
 
   @Override
-  public List<String> filterPartitionNames(String dbName, String tblName,
+  public List<String> filterPartitionNames(String catName, String dbName, 
String tblName,
       List<String> partitionNames) throws MetaException {
     return partitionNames;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/ba8a99e1/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
index 04828e5..ed1b8c5 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
@@ -61,6 +61,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_CATALOG_NAME;
+import static 
org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.getDefaultCatalog;
+import static 
org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier;
+
 /**
  * Hive specific implementation of alter
  */
@@ -85,16 +89,10 @@ public class HiveAlterHandler implements AlterHandler {
   }
 
   @Override
-  public void alterTable(RawStore msdb, Warehouse wh, String dbname,
-    String name, Table newt, EnvironmentContext environmentContext)
-      throws InvalidOperationException, MetaException {
-    alterTable(msdb, wh, dbname, name, newt, environmentContext, null);
-  }
-
-  @Override
-  public void alterTable(RawStore msdb, Warehouse wh, String dbname,
+  public void alterTable(RawStore msdb, Warehouse wh, String catName, String 
dbname,
       String name, Table newt, EnvironmentContext environmentContext,
       IHMSHandler handler) throws InvalidOperationException, MetaException {
+    catName = normalizeIdentifier(catName);
     name = name.toLowerCase();
     dbname = dbname.toLowerCase();
 
@@ -135,9 +133,15 @@ public class HiveAlterHandler implements AlterHandler {
       boolean isPartitionedTable = false;
       List<Partition> parts;
 
+      // Switching tables between catalogs is not allowed.
+      if (!catName.equalsIgnoreCase(newt.getCatName())) {
+        throw new InvalidOperationException("Tables cannot be moved between 
catalogs, old catalog" +
+            catName + ", new catalog " + newt.getCatName());
+      }
+
       // check if table with the new name already exists
       if (!newTblName.equals(name) || !newDbName.equals(dbname)) {
-        if (msdb.getTable(newDbName, newTblName) != null) {
+        if (msdb.getTable(catName, newDbName, newTblName) != null) {
           throw new InvalidOperationException("new table " + newDbName
               + "." + newTblName + " already exists");
         }
@@ -146,9 +150,10 @@ public class HiveAlterHandler implements AlterHandler {
 
       msdb.openTransaction();
       // get old table
-      oldt = msdb.getTable(dbname, name);
+      oldt = msdb.getTable(catName, dbname, name);
       if (oldt == null) {
-        throw new InvalidOperationException("table " + dbname + "." + name + " 
doesn't exist");
+        throw new InvalidOperationException("table " +
+            Warehouse.getCatalogQualifiedTableName(catName, dbname, name) + " 
doesn't exist");
       }
 
       if (oldt.getPartitionKeysSize() != 0) {
@@ -188,7 +193,7 @@ public class HiveAlterHandler implements AlterHandler {
           && (oldt.getSd().getLocation().compareTo(newt.getSd().getLocation()) 
== 0
             || StringUtils.isEmpty(newt.getSd().getLocation()))
           && !MetaStoreUtils.isExternalTable(oldt)) {
-        Database olddb = msdb.getDatabase(dbname);
+        Database olddb = msdb.getDatabase(catName, dbname);
         // if a table was created in a user specified location using the DDL 
like
         // create table tbl ... location ...., it should be treated like an 
external table
         // in the table rename, its data location should not be changed. We 
can check
@@ -204,7 +209,7 @@ public class HiveAlterHandler implements AlterHandler {
           srcFs = wh.getFs(srcPath);
 
           // get new location
-          Database db = msdb.getDatabase(newDbName);
+          Database db = msdb.getDatabase(catName, newDbName);
           Path databasePath = constructRenamedPath(wh.getDatabasePath(db), 
srcPath);
           destPath = new Path(databasePath, newTblName);
           destFs = wh.getFs(destPath);
@@ -222,8 +227,9 @@ public class HiveAlterHandler implements AlterHandler {
 
           try {
             if (destFs.exists(destPath)) {
-              throw new InvalidOperationException("New location for this table 
"
-                  + newDbName + "." + newTblName + " already exists : " + 
destPath);
+              throw new InvalidOperationException("New location for this table 
" +
+                  Warehouse.getCatalogQualifiedTableName(catName, newDbName, 
newTblName) +
+                      " already exists : " + destPath);
             }
             // check that src exists and also checks permissions necessary, 
rename src to dest
             if (srcFs.exists(srcPath) && wh.renameDir(srcPath, destPath, 
true)) {
@@ -242,7 +248,7 @@ public class HiveAlterHandler implements AlterHandler {
           String newTblLocPath = dataWasMoved ? destPath.toUri().getPath() : 
null;
 
           // also the location field in partition
-          parts = msdb.getPartitions(dbname, name, -1);
+          parts = msdb.getPartitions(catName, dbname, name, -1);
           Map<Partition, ColumnStatistics> columnStatsNeedUpdated = new 
HashMap<>();
           for (Partition part : parts) {
             String oldPartLoc = part.getSd().getLocation();
@@ -254,13 +260,13 @@ public class HiveAlterHandler implements AlterHandler {
             }
             part.setDbName(newDbName);
             part.setTableName(newTblName);
-            ColumnStatistics colStats = updateOrGetPartitionColumnStats(msdb, 
dbname, name,
+            ColumnStatistics colStats = updateOrGetPartitionColumnStats(msdb, 
catName, dbname, name,
                 part.getValues(), part.getSd().getCols(), oldt, part, null);
             if (colStats != null) {
               columnStatsNeedUpdated.put(part, colStats);
             }
           }
-          msdb.alterTable(dbname, name, newt);
+          msdb.alterTable(catName, dbname, name, newt);
           // alterPartition is only for changing the partition location in the 
table rename
           if (dataWasMoved) {
 
@@ -278,7 +284,7 @@ public class HiveAlterHandler implements AlterHandler {
               for (Partition part : partBatch) {
                 partValues.add(part.getValues());
               }
-              msdb.alterPartitions(newDbName, newTblName, partValues, 
partBatch);
+              msdb.alterPartitions(catName, newDbName, newTblName, partValues, 
partBatch);
             }
           }
 
@@ -295,7 +301,7 @@ public class HiveAlterHandler implements AlterHandler {
         // operations other than table rename
         if (MetaStoreUtils.requireCalStats(null, null, newt, 
environmentContext) &&
             !isPartitionedTable) {
-          Database db = msdb.getDatabase(newDbName);
+          Database db = msdb.getDatabase(catName, newDbName);
           // Update table stats. For partitioned table, we update stats in 
alterPartition()
           MetaStoreUtils.updateTableStatsFast(db, newt, wh, false, true, 
environmentContext, false);
         }
@@ -303,23 +309,23 @@ public class HiveAlterHandler implements AlterHandler {
         if (isPartitionedTable) {
           //Currently only column related changes can be cascaded in alter 
table
           if(!MetaStoreUtils.areSameColumns(oldt.getSd().getCols(), 
newt.getSd().getCols())) {
-            parts = msdb.getPartitions(dbname, name, -1);
+            parts = msdb.getPartitions(catName, dbname, name, -1);
             for (Partition part : parts) {
               Partition oldPart = new Partition(part);
               List<FieldSchema> oldCols = part.getSd().getCols();
               part.getSd().setCols(newt.getSd().getCols());
-              ColumnStatistics colStats = 
updateOrGetPartitionColumnStats(msdb, dbname, name,
+              ColumnStatistics colStats = 
updateOrGetPartitionColumnStats(msdb, catName, dbname, name,
                   part.getValues(), oldCols, oldt, part, null);
               assert(colStats == null);
               if (cascade) {
-                msdb.alterPartition(dbname, name, part.getValues(), part);
+                msdb.alterPartition(catName, dbname, name, part.getValues(), 
part);
               } else {
                 // update changed properties (stats)
                 oldPart.setParameters(part.getParameters());
-                msdb.alterPartition(dbname, name, part.getValues(), oldPart);
+                msdb.alterPartition(catName, dbname, name, part.getValues(), 
oldPart);
               }
             }
-            msdb.alterTable(dbname, name, newt);
+            msdb.alterTable(catName, dbname, name, newt);
           } else {
             LOG.warn("Alter table not cascaded to partitions.");
             alterTableUpdateTableColumnStats(msdb, oldt, newt);
@@ -345,7 +351,8 @@ public class HiveAlterHandler implements AlterHandler {
                   new CreateTableEvent(newt, true, handler),
                   environmentContext);
           if (isPartitionedTable) {
-            parts = msdb.getPartitions(newt.getDbName(), newt.getTableName(), 
-1);
+            String cName = newt.isSetCatName() ? newt.getCatName() : 
DEFAULT_CATALOG_NAME;
+            parts = msdb.getPartitions(cName, newt.getDbName(), 
newt.getTableName(), -1);
             MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
                     EventMessage.EventType.ADD_PARTITION,
                     new AddPartitionEvent(newt, parts, true, handler),
@@ -372,7 +379,8 @@ public class HiveAlterHandler implements AlterHandler {
               + " Check metastore logs for detailed stack." + e.getMessage());
     } finally {
       if (!success) {
-        LOG.error("Failed to alter table " + dbname + "." + name);
+        LOG.error("Failed to alter table " +
+            Warehouse.getCatalogQualifiedTableName(catName, dbname, name));
         msdb.rollbackTransaction();
         if (dataWasMoved) {
           try {
@@ -413,13 +421,15 @@ public class HiveAlterHandler implements AlterHandler {
     final String name, final List<String> part_vals, final Partition new_part,
     EnvironmentContext environmentContext)
      throws InvalidOperationException, InvalidObjectException, AlreadyExistsException, MetaException {
-    return alterPartition(msdb, wh, dbname, name, part_vals, new_part, environmentContext, null);
+    return alterPartition(msdb, wh, DEFAULT_CATALOG_NAME, dbname, name, part_vals, new_part,
+        environmentContext, null);
   }
 
   @Override
-  public Partition alterPartition(final RawStore msdb, Warehouse wh, final String dbname,
-    final String name, final List<String> part_vals, final Partition new_part,
-    EnvironmentContext environmentContext, IHMSHandler handler)
+  public Partition alterPartition(final RawStore msdb, Warehouse wh, final String catName,
+                                  final String dbname, final String name,
+                                  final List<String> part_vals, final Partition new_part,
+                                  EnvironmentContext environmentContext, IHMSHandler handler)
      throws InvalidOperationException, InvalidObjectException, AlreadyExistsException, MetaException {
     boolean success = false;
     Partition oldPart;
@@ -436,18 +446,17 @@ public class HiveAlterHandler implements AlterHandler {
           .currentTimeMillis() / 1000));
     }
 
-
     //alter partition
     if (part_vals == null || part_vals.size() == 0) {
       try {
         msdb.openTransaction();
 
-        Table tbl = msdb.getTable(dbname, name);
+        Table tbl = msdb.getTable(catName, dbname, name);
         if (tbl == null) {
           throw new InvalidObjectException(
              "Unable to alter partition because table or database does not exist.");
        }
-        oldPart = msdb.getPartition(dbname, name, new_part.getValues());
+        oldPart = msdb.getPartition(catName, dbname, name, new_part.getValues());
        if (MetaStoreUtils.requireCalStats(oldPart, new_part, tbl, environmentContext)) {
           // if stats are same, no need to update
           if (MetaStoreUtils.isFastStatsSame(oldPart, new_part)) {
@@ -460,10 +469,10 @@ public class HiveAlterHandler implements AlterHandler {
 
        // PartitionView does not have SD. We do not need update its column stats
        if (oldPart.getSd() != null) {
-          updateOrGetPartitionColumnStats(msdb, dbname, name, new_part.getValues(),
+          updateOrGetPartitionColumnStats(msdb, catName, dbname, name, new_part.getValues(),
              oldPart.getSd().getCols(), tbl, new_part, null);
        }
-        msdb.alterPartition(dbname, name, new_part.getValues(), new_part);
+        msdb.alterPartition(catName, dbname, name, new_part.getValues(), new_part);
        if (transactionalListeners != null && !transactionalListeners.isEmpty()) {
          MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
                                                EventMessage.EventType.ALTER_PARTITION,
@@ -496,13 +505,13 @@ public class HiveAlterHandler implements AlterHandler {
     boolean dataWasMoved = false;
     try {
       msdb.openTransaction();
-      Table tbl = msdb.getTable(dbname, name);
+      Table tbl = msdb.getTable(DEFAULT_CATALOG_NAME, dbname, name);
       if (tbl == null) {
         throw new InvalidObjectException(
            "Unable to alter partition because table or database does not exist.");
       }
       try {
-        oldPart = msdb.getPartition(dbname, name, part_vals);
+        oldPart = msdb.getPartition(catName, dbname, name, part_vals);
       } catch (NoSuchObjectException e) {
         // this means there is no existing partition
         throw new InvalidObjectException(
@@ -511,7 +520,7 @@ public class HiveAlterHandler implements AlterHandler {
 
       Partition check_part;
       try {
-        check_part = msdb.getPartition(dbname, name, new_part.getValues());
+        check_part = msdb.getPartition(catName, dbname, name, new_part.getValues());
       } catch(NoSuchObjectException e) {
         // this means there is no existing partition
         check_part = null;
@@ -530,7 +539,7 @@ public class HiveAlterHandler implements AlterHandler {
         try {
           // if tbl location is available use it
           // else derive the tbl location from database location
-          destPath = wh.getPartitionPath(msdb.getDatabase(dbname), tbl, new_part.getValues());
+          destPath = wh.getPartitionPath(msdb.getDatabase(catName, dbname), tbl, new_part.getValues());
           destPath = constructRenamedPath(destPath, new Path(new_part.getSd().getLocation()));
         } catch (NoSuchObjectException e) {
           LOG.debug("Didn't find object in metastore ", e);
@@ -593,9 +602,9 @@ public class HiveAlterHandler implements AlterHandler {
       }
 
      String newPartName = Warehouse.makePartName(tbl.getPartitionKeys(), new_part.getValues());
-      ColumnStatistics cs = updateOrGetPartitionColumnStats(msdb, dbname, name, oldPart.getValues(),
+      ColumnStatistics cs = updateOrGetPartitionColumnStats(msdb, catName, dbname, name, oldPart.getValues(),
          oldPart.getSd().getCols(), tbl, new_part, null);
-      msdb.alterPartition(dbname, name, part_vals, new_part);
+      msdb.alterPartition(catName, dbname, name, part_vals, new_part);
       if (cs != null) {
         cs.getStatsDesc().setPartName(newPartName);
         try {
@@ -643,13 +652,15 @@ public class HiveAlterHandler implements AlterHandler {
     final String name, final List<Partition> new_parts,
     EnvironmentContext environmentContext)
      throws InvalidOperationException, InvalidObjectException, AlreadyExistsException, MetaException {
-    return alterPartitions(msdb, wh, dbname, name, new_parts, environmentContext, null);
+    return alterPartitions(msdb, wh, DEFAULT_CATALOG_NAME, dbname, name, new_parts,
+        environmentContext, null);
   }
 
   @Override
-  public List<Partition> alterPartitions(final RawStore msdb, Warehouse wh, final String dbname,
-    final String name, final List<Partition> new_parts, EnvironmentContext environmentContext,
-    IHMSHandler handler)
+  public List<Partition> alterPartitions(final RawStore msdb, Warehouse wh, final String catName,
+                                         final String dbname, final String name,
+                                         final List<Partition> new_parts,
+                                         EnvironmentContext environmentContext, IHMSHandler handler)
      throws InvalidOperationException, InvalidObjectException, AlreadyExistsException, MetaException {
     List<Partition> oldParts = new ArrayList<>();
     List<List<String>> partValsList = new ArrayList<>();
@@ -658,12 +669,11 @@ public class HiveAlterHandler implements AlterHandler {
       transactionalListeners = handler.getTransactionalListeners();
     }
 
-
     boolean success = false;
     try {
       msdb.openTransaction();
 
-      Table tbl = msdb.getTable(dbname, name);
+      Table tbl = msdb.getTable(catName, dbname, name);
       if (tbl == null) {
         throw new InvalidObjectException(
            "Unable to alter partitions because table or database does not exist.");
@@ -677,7 +687,7 @@ public class HiveAlterHandler implements AlterHandler {
               .currentTimeMillis() / 1000));
         }
 
-        Partition oldTmpPart = msdb.getPartition(dbname, name, tmpPart.getValues());
+        Partition oldTmpPart = msdb.getPartition(catName, dbname, name, tmpPart.getValues());
         oldParts.add(oldTmpPart);
         partValsList.add(tmpPart.getValues());
 
@@ -693,12 +703,12 @@ public class HiveAlterHandler implements AlterHandler {
 
        // PartitionView does not have SD and we do not need to update its column stats
        if (oldTmpPart.getSd() != null) {
-          updateOrGetPartitionColumnStats(msdb, dbname, name, oldTmpPart.getValues(),
+          updateOrGetPartitionColumnStats(msdb, catName, dbname, name, oldTmpPart.getValues(),
              oldTmpPart.getSd().getCols(), tbl, tmpPart, null);
         }
       }
 
-      msdb.alterPartitions(dbname, name, partValsList, new_parts);
+      msdb.alterPartitions(catName, dbname, name, partValsList, new_parts);
       Iterator<Partition> oldPartsIt = oldParts.iterator();
       for (Partition newPart : new_parts) {
         Partition oldPart;
@@ -768,10 +778,12 @@ public class HiveAlterHandler implements AlterHandler {
   @VisibleForTesting
  void alterTableUpdateTableColumnStats(RawStore msdb, Table oldTable, Table newTable)
      throws MetaException, InvalidObjectException {
+    String catName = normalizeIdentifier(oldTable.isSetCatName() ? oldTable.getCatName() :
+        getDefaultCatalog(conf));
    String dbName = oldTable.getDbName().toLowerCase();
-    String tableName = org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(oldTable.getTableName());
+    String tableName = normalizeIdentifier(oldTable.getTableName());
    String newDbName = newTable.getDbName().toLowerCase();
-    String newTableName = org.apache.hadoop.hive.metastore.utils.StringUtils.normalizeIdentifier(newTable.getTableName());
+    String newTableName = normalizeIdentifier(newTable.getTableName());
 
     try {
       List<FieldSchema> oldCols = oldTable.getSd().getCols();
@@ -794,7 +806,7 @@ public class HiveAlterHandler implements AlterHandler {
           }
 
          // Collect column stats which need to be rewritten and remove old stats
-          colStats = msdb.getTableColumnStatistics(dbName, tableName, oldColNames);
+          colStats = msdb.getTableColumnStatistics(catName, dbName, tableName, oldColNames);
           if (colStats == null) {
             updateColumnStats = false;
           } else {
@@ -813,12 +825,12 @@ public class HiveAlterHandler implements AlterHandler {
 
                 if (found) {
                 if (!newDbName.equals(dbName) || !newTableName.equals(tableName)) {
-                    msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName());
+                    msdb.deleteTableColumnStatistics(catName, dbName, tableName, statsObj.getColName());
                     newStatsObjs.add(statsObj);
                     deletedCols.add(statsObj.getColName());
                   }
                 } else {
-                  msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName());
+                  msdb.deleteTableColumnStatistics(catName, dbName, tableName, statsObj.getColName());
                   deletedCols.add(statsObj.getColName());
                 }
               }
@@ -828,7 +840,7 @@ public class HiveAlterHandler implements AlterHandler {
         }
 
         // Change to new table and append stats for the new table
-        msdb.alterTable(dbName, tableName, newTable);
+        msdb.alterTable(catName, dbName, tableName, newTable);
         if (updateColumnStats && !newStatsObjs.isEmpty()) {
           ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
           statsDesc.setDbName(newDbName);
@@ -845,7 +857,7 @@ public class HiveAlterHandler implements AlterHandler {
   }
 
   private ColumnStatistics updateOrGetPartitionColumnStats(
-      RawStore msdb, String dbname, String tblname, List<String> partVals,
+      RawStore msdb, String catName, String dbname, String tblname, List<String> partVals,
      List<FieldSchema> oldCols, Table table, Partition part, List<FieldSchema> newCols)
          throws MetaException, InvalidObjectException {
     ColumnStatistics newPartsColStats = null;
@@ -868,7 +880,7 @@ public class HiveAlterHandler implements AlterHandler {
         oldColNames.add(oldCol.getName());
       }
       List<String> oldPartNames = Lists.newArrayList(oldPartName);
-      List<ColumnStatistics> partsColStats = msdb.getPartitionColumnStatistics(dbname, tblname,
+      List<ColumnStatistics> partsColStats = msdb.getPartitionColumnStatistics(catName, dbname, tblname,
          oldPartNames, oldColNames);
       assert (partsColStats.size() <= 1);
      for (ColumnStatistics partColStats : partsColStats) { //actually only at most one loop
@@ -886,12 +898,12 @@ public class HiveAlterHandler implements AlterHandler {
           }
           if (found) {
             if (rename) {
-              msdb.deletePartitionColumnStatistics(dbname, tblname, partColStats.getStatsDesc().getPartName(),
+              msdb.deletePartitionColumnStatistics(catName, dbname, tblname, partColStats.getStatsDesc().getPartName(),
                  partVals, statsObj.getColName());
              newStatsObjs.add(statsObj);
            }
          } else {
-            msdb.deletePartitionColumnStatistics(dbname, tblname, partColStats.getStatsDesc().getPartName(),
+            msdb.deletePartitionColumnStatistics(catName, dbname, tblname, partColStats.getStatsDesc().getPartName(),
                partVals, statsObj.getColName());
             deletedCols.add(statsObj.getColName());
           }

Reply via email to