This is an automated email from the ASF dual-hosted git repository.

dkuzmenko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new d2debebbbfe HIVE-26227: Add support for Catalog statements in HiveQL 
(Wechar Yu, reviewed by Butao Zhang, Denys Kuzmenko, Peter Vary, Shohei Okumiya)
d2debebbbfe is described below

commit d2debebbbfe7b8624332f187c1419c4cc58a5b92
Author: Wechar Yu <[email protected]>
AuthorDate: Thu Apr 3 19:12:38 2025 +0800

    HIVE-26227: Add support for Catalog statements in HiveQL (Wechar Yu, 
reviewed by Butao Zhang, Denys Kuzmenko, Peter Vary, Shohei Okumiya)
    
    Closes #3288
---
 .../java/org/apache/hadoop/hive/ql/ErrorMsg.java   |   2 +
 .../org/apache/hadoop/hive/ql/log/PerfLogger.java  |   1 +
 .../hadoop/hive/ql/parse/AlterClauseParser.g       |  14 +++
 .../apache/hadoop/hive/ql/parse/HiveLexerParent.g  |   2 +
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g   |  48 ++++++++-
 .../hadoop/hive/ql/parse/IdentifiersParser.g       |   2 +-
 .../alter/AbstractAlterCatalogAnalyzer.java        |  48 +++++++++
 .../catalog/alter/AbstractAlterCatalogDesc.java    |  42 ++++++++
 .../alter/AbstractAlterCatalogOperation.java       |  49 +++++++++
 .../location/AlterCatalogSetLocationAnalyzer.java  |  47 ++++++++
 .../location/AlterCatalogSetLocationDesc.java      |  40 +++++++
 .../location/AlterCatalogSetLocationOperation.java |  61 +++++++++++
 .../ddl/catalog/create/CreateCatalogAnalyzer.java  |  70 ++++++++++++
 .../ql/ddl/catalog/create/CreateCatalogDesc.java   |  64 +++++++++++
 .../ddl/catalog/create/CreateCatalogOperation.java |  48 +++++++++
 .../ql/ddl/catalog/desc/DescCatalogAnalyzer.java   |  61 +++++++++++
 .../hive/ql/ddl/catalog/desc/DescCatalogDesc.java  |  67 ++++++++++++
 .../ql/ddl/catalog/desc/DescCatalogFormatter.java  |  92 ++++++++++++++++
 .../ql/ddl/catalog/desc/DescCatalogOperation.java  |  58 ++++++++++
 .../ql/ddl/catalog/drop/DropCatalogAnalyzer.java   |  59 +++++++++++
 .../hive/ql/ddl/catalog/drop/DropCatalogDesc.java  |  50 +++++++++
 .../ql/ddl/catalog/drop/DropCatalogOperation.java  |  45 ++++++++
 .../ql/ddl/catalog/show/ShowCatalogsAnalyzer.java  |  57 ++++++++++
 .../hive/ql/ddl/catalog/show/ShowCatalogsDesc.java |  53 +++++++++
 .../ql/ddl/catalog/show/ShowCatalogsFormatter.java |  71 +++++++++++++
 .../ql/ddl/catalog/show/ShowCatalogsOperation.java |  67 ++++++++++++
 .../org/apache/hadoop/hive/ql/hooks/Entity.java    |  25 ++++-
 .../hadoop/hive/ql/hooks/HiveProtoLoggingHook.java |   3 +-
 .../apache/hadoop/hive/ql/hooks/ReadEntity.java    |   8 ++
 .../apache/hadoop/hive/ql/hooks/WriteEntity.java   |   6 ++
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   |  70 +++++++++++-
 .../hadoop/hive/ql/parse/BaseSemanticAnalyzer.java |  17 +++
 .../apache/hadoop/hive/ql/plan/HiveOperation.java  |   6 ++
 .../hive/ql/security/authorization/Privilege.java  |   3 +
 .../authorization/plugin/HiveOperationType.java    |   5 +
 .../plugin/sqlstd/Operation2Privilege.java         |   9 ++
 ql/src/test/queries/clientpositive/catalog.q       |  47 ++++++++
 .../test/results/clientpositive/llap/catalog.q.out | 118 +++++++++++++++++++++
 38 files changed, 1527 insertions(+), 8 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java 
b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index d3c85832b7a..cd62ad1688c 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -493,6 +493,8 @@ public enum ErrorMsg {
   ICEBERG_COMPACTION_WITH_PART_SPEC_AND_FILTER_NOT_SUPPORTED(10441, 
"Compaction command with both partition spec and filter is not supported on 
Iceberg table {0}.{1}", true),
   COMPACTION_THREAD_INITIALIZATION(10442, "Compaction thread failed during 
initialization", false),
   ALTER_TABLE_COMPACTION_NON_PARTITIONED_COLUMN_NOT_ALLOWED(10443, "Filter 
expression can contain only partition columns."),
+  CATALOG_ALREADY_EXISTS(10444, "Catalog {0} already exists", true),
+  CATALOG_NOT_EXISTS(10445, "Catalog {0} does not exist", true),
 
   //========================== 20000 range starts here 
========================//
 
diff --git a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java 
b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
index 5cc12e6cec9..15720d03fe1 100644
--- a/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
+++ b/common/src/java/org/apache/hadoop/hive/ql/log/PerfLogger.java
@@ -104,6 +104,7 @@ public class PerfLogger {
   public static final String STATS_TASK = "StatsTask";
 
   public static final String HIVE_GET_TABLE = "getTablesByType";
+  public static final String HIVE_GET_CATALOG = "getCatalog";
   public static final String HIVE_GET_DATABASE = "getDatabase";
   public static final String HIVE_GET_DATABASE_2 = "getDatabase2";
   public static final String HIVE_GET_PARTITIONS = "getPartitions";
diff --git 
a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index 3e528476f51..d75e6cecab2 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -53,6 +53,7 @@ alterStatement
     | KW_ALTER (KW_DATABASE|KW_SCHEMA) alterDatabaseStatementSuffix -> 
alterDatabaseStatementSuffix
     | KW_ALTER KW_DATACONNECTOR alterDataConnectorStatementSuffix -> 
alterDataConnectorStatementSuffix
     | KW_OPTIMIZE KW_TABLE tableName optimizeTableStatementSuffix -> 
^(TOK_ALTERTABLE tableName optimizeTableStatementSuffix)
+    | KW_ALTER KW_CATALOG alterCatalogStatementSuffix -> 
alterCatalogStatementSuffix
     ;
 
 alterTableStatementSuffix
@@ -155,6 +156,19 @@ alterMaterializedViewSuffixRebuild[CommonTree 
tableNameTree]
     : KW_REBUILD -> ^(TOK_ALTER_MATERIALIZED_VIEW_REBUILD {$tableNameTree})
     ;
 
+alterCatalogStatementSuffix
+@init { gParent.pushMsg("alter catalog statement", state); }
+@after { gParent.popMsg(state); }
+    : alterCatalogSuffixSetLocation
+    ;
+
+alterCatalogSuffixSetLocation
+@init { gParent.pushMsg("alter catalog set location", state); }
+@after { gParent.popMsg(state); }
+    : catName=identifier KW_SET KW_LOCATION newLocation=StringLiteral
+    -> ^(TOK_ALTERCATALOG_LOCATION $catName $newLocation)
+    ;
+
 alterDatabaseStatementSuffix
 @init { gParent.pushMsg("alter database statement", state); }
 @after { gParent.popMsg(state); }
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
index 3d8b4ab7741..bf13e8c4ea9 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
@@ -216,6 +216,8 @@ KW_INTERSECT: 'INTERSECT';
 KW_VIEW: 'VIEW';
 KW_VIEWS: 'VIEWS';
 KW_IN: 'IN';
+KW_CATALOG: 'CATALOG';
+KW_CATALOGS: 'CATALOGS';
 KW_DATABASE: 'DATABASE';
 KW_DATABASES: 'DATABASES';
 KW_MATERIALIZED: 'MATERIALIZED';
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 497d2928a3b..1f7acf2f7dd 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -157,6 +157,7 @@ TOK_STRUCT;
 TOK_MAP;
 TOK_UNIONTYPE;
 TOK_COLTYPELIST;
+TOK_CREATECATALOG;
 TOK_CREATEDATABASE;
 TOK_CREATEDATACONNECTOR;
 TOK_CREATETABLE;
@@ -229,6 +230,7 @@ TOK_RETAIN;
 TOK_WITH_SNAPSHOT_RETENTION;
 TOK_ALTERTABLE_CONVERT;
 TOK_MSCK;
+TOK_SHOWCATALOGS;
 TOK_SHOWDATABASES;
 TOK_SHOWDATACONNECTORS;
 TOK_SHOWTABLES;
@@ -246,6 +248,7 @@ TOK_UNLOCKTABLE;
 TOK_LOCKDB;
 TOK_UNLOCKDB;
 TOK_SWITCHDATABASE;
+TOK_DROPCATALOG;
 TOK_DROPDATABASE;
 TOK_DROPTABLE;
 TOK_DATABASECOMMENT;
@@ -369,6 +372,10 @@ TOK_SHOW_ROLES;
 TOK_SHOW_CURRENT_ROLE;
 TOK_SHOW_ROLE_PRINCIPALS;
 TOK_SHOWDBLOCKS;
+TOK_DESCCATALOG;
+TOK_CATALOGLOCATION;
+TOK_CATALOGCOMMENT;
+TOK_ALTERCATALOG_LOCATION;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
 TOK_DATABASELOCATION;
@@ -1000,7 +1007,9 @@ importStatement
 ddlStatement
 @init { pushMsg("ddl statement", state); }
 @after { popMsg(state); }
-    : createDatabaseStatement
+    : createCatalogStatement
+    | dropCatalogStatement
+    | createDatabaseStatement
     | switchDatabaseStatement
     | dropDatabaseStatement
     | createTableStatement
@@ -1102,6 +1111,38 @@ orReplace
     -> ^(TOK_ORREPLACE)
     ;
 
+createCatalogStatement
+@init { pushMsg("create catalog statement", state); }
+@after { popMsg(state); }
+    : KW_CREATE KW_CATALOG
+        ifNotExists?
+        name=identifier
+        catLocation
+        catalogComment?
+    -> ^(TOK_CREATECATALOG $name catLocation ifNotExists? catalogComment?)
+    ;
+
+catLocation
+@init { pushMsg("catalog location specification", state); }
+@after { popMsg(state); }
+    :
+      KW_LOCATION locn=StringLiteral -> ^(TOK_CATALOGLOCATION $locn)
+    ;
+
+catalogComment
+@init { pushMsg("catalog's comment", state); }
+@after { popMsg(state); }
+    : KW_COMMENT comment=StringLiteral
+    -> ^(TOK_CATALOGCOMMENT $comment)
+    ;
+
+dropCatalogStatement
+@init { pushMsg("drop catalog statement", state); }
+@after { popMsg(state); }
+    : KW_DROP KW_CATALOG ifExists? identifier
+    -> ^(TOK_DROPCATALOG identifier ifExists?)
+    ;
+
 createDatabaseStatement
 @init { pushMsg("create database statement", state); }
 @after { popMsg(state); }
@@ -1233,6 +1274,8 @@ descStatement
     :
     (KW_DESCRIBE|KW_DESC)
     (
+    (KW_CATALOG) => (KW_CATALOG) KW_EXTENDED? (catName=identifier) -> 
^(TOK_DESCCATALOG $catName KW_EXTENDED?)
+    |
     (KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) KW_EXTENDED? 
(dbName=identifier) -> ^(TOK_DESCDATABASE $dbName KW_EXTENDED?)
     |
     (KW_DATACONNECTOR) => (KW_DATACONNECTOR) KW_EXTENDED? (dcName=identifier) 
-> ^(TOK_DESCDATACONNECTOR $dcName KW_EXTENDED?)
@@ -1261,7 +1304,8 @@ analyzeStatement
 showStatement
 @init { pushMsg("show statement", state); }
 @after { popMsg(state); }
-    : KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? -> 
^(TOK_SHOWDATABASES showStmtIdentifier?)
+    : KW_SHOW KW_CATALOGS (KW_LIKE showStmtIdentifier)? -> ^(TOK_SHOWCATALOGS 
showStmtIdentifier?)
+    | KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? -> 
^(TOK_SHOWDATABASES showStmtIdentifier?)
     | KW_SHOW (isExtended=KW_EXTENDED)? KW_TABLES ((KW_FROM|KW_IN) 
db_name=identifier)? (filter=showTablesFilterExpr)?
     -> ^(TOK_SHOWTABLES (TOK_FROM $db_name)? $filter? $isExtended?)
     | KW_SHOW KW_VIEWS ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE 
showStmtIdentifier|showStmtIdentifier)?  -> ^(TOK_SHOWVIEWS (TOK_FROM 
$db_name)? showStmtIdentifier?)
diff --git 
a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g 
b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 3d219dbe294..7f08cb8828e 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -973,7 +973,7 @@ nonReserved
     :
     KW_ABORT | KW_ADD | KW_ADMIN | KW_AFTER | KW_ANALYZE | KW_ARCHIVE | KW_ASC 
| KW_BEFORE | KW_BUCKET | KW_BUCKETS
     | KW_CASCADE | KW_CBO | KW_CHANGE | KW_CHECK | KW_CLUSTER | KW_CLUSTERED | 
KW_CLUSTERSTATUS | KW_COLLECTION | KW_COLUMNS
-    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | 
KW_CONTINUE | KW_COST | KW_DATA | KW_DAY
+    | KW_COMMENT | KW_COMPACT | KW_COMPACTIONS | KW_COMPUTE | KW_CONCATENATE | 
KW_CONTINUE | KW_COST | KW_DATA | KW_DAY | KW_CATALOG | KW_CATALOGS
     | KW_DATABASES | KW_DATETIME | KW_DBPROPERTIES | KW_DCPROPERTIES | 
KW_DEFERRED | KW_DEFINED | KW_DELIMITED | KW_DEPENDENCY
     | KW_DESC | KW_DIRECTORIES | KW_DIRECTORY | KW_DISABLE | KW_DISTRIBUTE | 
KW_DISTRIBUTED | KW_DOW | KW_ELEM_TYPE
     | KW_ENABLE | KW_ENFORCED | KW_ESCAPED | KW_EXCLUSIVE | KW_EXPLAIN | 
KW_EXPORT | KW_FIELDS | KW_FILE | KW_FILEFORMAT
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java
new file mode 100644
index 00000000000..414e9b57231
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogAnalyzer.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog alteration commands.
+ */
+public abstract class AbstractAlterCatalogAnalyzer extends 
BaseSemanticAnalyzer {
+  public AbstractAlterCatalogAnalyzer(QueryState queryState) throws 
SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    AbstractAlterCatalogDesc alterDesc = buildAlterCatalogDesc(root);
+    Catalog catalog = getCatalog(alterDesc.getCatalogName());
+    outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), 
alterDesc)));
+  }
+
+  protected abstract AbstractAlterCatalogDesc buildAlterCatalogDesc(ASTNode 
root)
+      throws SemanticException;
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java
new file mode 100644
index 00000000000..d144b2103f4
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogDesc.java
@@ -0,0 +1,42 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
+/**
+ * DDL task description for ALTER CATALOG commands.
+ */
+public abstract class AbstractAlterCatalogDesc implements DDLDesc, 
Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String catalogName;
+
+  public AbstractAlterCatalogDesc(String catalogName) {
+    this.catalogName = catalogName;
+  }
+
+  @Explain(displayName="name", explainLevels = { Explain.Level.USER, 
Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+  public String getCatalogName() {
+    return catalogName;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java
new file mode 100644
index 00000000000..a78693e292e
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/AbstractAlterCatalogOperation.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of altering a catalog.
+ */
+public abstract class AbstractAlterCatalogOperation<T extends 
AbstractAlterCatalogDesc> extends DDLOperation<T> {
+  public AbstractAlterCatalogOperation(DDLOperationContext context, T desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws Exception {
+    String catalogName = desc.getCatalogName();
+    Catalog catalog = context.getDb().getMSC().getCatalog(catalogName);
+    if (catalog == null) {
+      throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, catalogName);
+    }
+    doAlteration(catalog);
+
+    context.getDb().alterCatalog(catalogName, catalog);
+    return 0;
+  }
+
+  protected abstract void doAlteration(Catalog catalog) throws HiveException;
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java
new file mode 100644
index 00000000000..ac380ae44ac
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationAnalyzer.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter.location;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import 
org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogAnalyzer;
+import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogDesc;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog set location commands.
+ */
+@DDLType(types = HiveParser.TOK_ALTERCATALOG_LOCATION)
+public class AlterCatalogSetLocationAnalyzer extends 
AbstractAlterCatalogAnalyzer {
+  public AlterCatalogSetLocationAnalyzer(QueryState queryState) throws 
SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  protected AbstractAlterCatalogDesc buildAlterCatalogDesc(ASTNode root) 
throws SemanticException {
+    String catalogName = getUnescapedName((ASTNode) root.getChild(0));
+    String newLocation = unescapeSQLString(root.getChild(1).getText());
+
+    outputs.add(toWriteEntity(newLocation));
+
+    return new AlterCatalogSetLocationDesc(catalogName, newLocation);
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java
new file mode 100644
index 00000000000..22a4034ea48
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationDesc.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter.location;
+
+import org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+/**
+ * DDL task description for ALTER CATALOG ... SET LOCATION ... commands.
+ */
+@Explain(displayName = "Set Catalog Location", explainLevels = { 
Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+public class AlterCatalogSetLocationDesc extends AbstractAlterCatalogDesc {
+  private final String location;
+
+  public AlterCatalogSetLocationDesc(String catalogName, String location) {
+    super(catalogName);
+    this.location = location;
+  }
+
+  @Explain(displayName="location")
+  public String getLocation() {
+    return location;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java
new file mode 100644
index 00000000000..d2974f58bb5
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/alter/location/AlterCatalogSetLocationOperation.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.alter.location;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import 
org.apache.hadoop.hive.ql.ddl.catalog.alter.AbstractAlterCatalogOperation;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+
+/**
+ * Operation process of altering a catalog's location.
+ */
+public class AlterCatalogSetLocationOperation extends 
AbstractAlterCatalogOperation<AlterCatalogSetLocationDesc> {
+  public AlterCatalogSetLocationOperation(DDLOperationContext context, 
AlterCatalogSetLocationDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  protected void doAlteration(Catalog catalog) throws HiveException {
+    try {
+      String newLocation = Utilities.getQualifiedPath(context.getConf(), new 
Path(desc.getLocation()));
+
+      URI locationURI = new URI(newLocation);
+      if (!locationURI.isAbsolute()) {
+        throw new HiveException(ErrorMsg.BAD_LOCATION_VALUE, newLocation);
+      }
+
+      if (newLocation.equals(catalog.getLocationUri())) {
+        LOG.info("AlterCatalog skipped. No change in location.");
+      } else {
+        LOG.info("Catalog location changed from {} to {}", 
catalog.getLocationUri(), newLocation);
+        catalog.setLocationUri(newLocation);
+      }
+    } catch (URISyntaxException e) {
+      throw new HiveException(e);
+    }
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java
new file mode 100644
index 00000000000..4357f7cf465
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogAnalyzer.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.create;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog creation commands.
+ */
[email protected](types = HiveParser.TOK_CREATECATALOG)
+public class CreateCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public CreateCatalogAnalyzer(QueryState queryState) throws SemanticException 
{
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
+    String locationUrl = 
unescapeSQLString(root.getChild(1).getChild(0).getText());
+    outputs.add(toWriteEntity(locationUrl));
+
+    boolean ifNotExists = false;
+    String comment = null;
+
+    for (int i = 2; i < root.getChildCount(); i++) {
+      ASTNode childNode = (ASTNode) root.getChild(i);
+      switch (childNode.getToken().getType()) {
+        case HiveParser.TOK_IFNOTEXISTS:
+          ifNotExists = true;
+          break;
+        case HiveParser.TOK_CATALOGCOMMENT:
+          comment = unescapeSQLString(childNode.getChild(0).getText());
+          break;
+        default:
+          throw new SemanticException("Unrecognized token in CREATE CATALOG 
statement");
+      }
+    }
+
+    CreateCatalogDesc desc = new CreateCatalogDesc(catalogName, comment, 
locationUrl, ifNotExists);
+    Catalog catalog = new Catalog(catalogName, locationUrl);
+
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), 
desc)));
+    outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK));
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java
new file mode 100644
index 00000000000..030c4a8a6f7
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogDesc.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.create;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
+/**
+ * DDL task description for CREATE CATALOG commands.
+ */
+@Explain(displayName = "Create CATALOG", explainLevels = { Explain.Level.USER, 
Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+public class CreateCatalogDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String catalogName;
+  private final String comment;
+  private final String locationUri;
+  private final boolean ifNotExists;
+
+  public CreateCatalogDesc(String catalogName, String comment, String 
locationUri, boolean ifNotExists) {
+    this.catalogName = catalogName;
+    this.comment = comment;
+    this.locationUri = locationUri;
+    this.ifNotExists = ifNotExists;
+  }
+
+  @Explain(displayName="name", explainLevels = { Explain.Level.USER, 
Explain.Level.DEFAULT, Explain.Level.EXTENDED })
+  public String getName() {
+    return catalogName;
+  }
+
+  @Explain(displayName="comment")
+  public String getComment() {
+    return comment;
+  }
+
+  @Explain(displayName="locationUri")
+  public String getLocationUri() {
+    return locationUri;
+  }
+
+  @Explain(displayName="if not exists", displayOnlyOnTrue = true)
+  public boolean isIfNotExists() {
+    return ifNotExists;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java
new file mode 100644
index 00000000000..8b3db7cc537
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/create/CreateCatalogOperation.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.create;
+
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of creating a catalog.
+ */
+public class CreateCatalogOperation extends DDLOperation<CreateCatalogDesc> {
+  public CreateCatalogOperation(DDLOperationContext context, CreateCatalogDesc 
desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws Exception {
+    Catalog catalog = new Catalog(desc.getName(), desc.getLocationUri());
+    catalog.setDescription(desc.getComment());
+
+    try {
+      context.getDb().createCatalog(catalog, desc.isIfNotExists());
+    } catch (AlreadyExistsException e) {
+      throw new HiveException(e, ErrorMsg.CATALOG_ALREADY_EXISTS, 
desc.getName());
+    }
+    return 0;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java
new file mode 100644
index 00000000000..c00d7bf105b
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogAnalyzer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.desc;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog description commands.
+ */
+@DDLType(types = HiveParser.TOK_DESCCATALOG)
+public class DescCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public DescCatalogAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() == 0 || root.getChildCount() > 2) {
+      throw new SemanticException("Unexpected Tokens at DESCRIBE CATALOG");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String catalogName = root.getChild(0).getText();
+    boolean isExtended = root.getChildCount() == 2;
+
+    inputs.add(new ReadEntity(getCatalog(catalogName)));
+
+    DescCatalogDesc desc = new DescCatalogDesc(ctx.getResFile(), catalogName, 
isExtended);
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), 
getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(desc.getSchema()));
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java
new file mode 100644
index 00000000000..0f5e9eaf9b2
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogDesc.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.desc;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
/**
 * DDL task description for DESC CATALOG commands.
 */
@Explain(displayName = "Describe Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
public class DescCatalogDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  /** Result schema for the plain command: name, comment and location columns. */
  public static final String DESC_CATALOG_SCHEMA = "cat_name,comment,location#string:string:string";

  /** Result schema when EXTENDED is given: adds the create_time column. */
  public static final String DESC_CATALOG_SCHEMA_EXTENDED = "cat_name,comment,location,create_time#string:string:string:string";

  private final String resFile;
  private final String catName;
  private final boolean isExtended;

  /**
   * @param resFile local temporary file the results are written to
   * @param catName name of the catalog to describe
   * @param isExtended true when DESCRIBE CATALOG EXTENDED was issued
   */
  public DescCatalogDesc(Path resFile, String catName, boolean isExtended) {
    this.resFile = resFile.toString();
    this.catName = catName;
    this.isExtended = isExtended;
  }

  @Explain(displayName = "result file", explainLevels = { Explain.Level.EXTENDED })
  public String getResFile() {
    return resFile;
  }

  @Explain(displayName = "catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
  public String getCatName() {
    return catName;
  }

  @Explain(displayName = "extended", displayOnlyOnTrue=true,
      explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
  public boolean isExtended() {
    return isExtended;
  }

  /** Returns the fetch-task schema matching the extended flag. */
  public String getSchema() {
    return isExtended ? DESC_CATALOG_SCHEMA_EXTENDED : DESC_CATALOG_SCHEMA;
  }
}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java
new file mode 100644
index 00000000000..c89e23b5c34
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogFormatter.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.desc;
+
+import org.apache.hadoop.hive.common.type.CalendarUtils;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.ShowUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.formatting.MapBuilder;
+import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
+import org.apache.hive.common.util.HiveStringUtils;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Formats DESC CATALOG results.
+ */
+abstract class DescCatalogFormatter {
+  static DescCatalogFormatter getFormatter(HiveConf hiveConf) {
+    if (MetaDataFormatUtils.isJson(hiveConf)) {
+      return new JsonDescCatalogFormatter();
+    }
+    return new TextDescCatalogFormatter();
+  }
+
+  abstract void showCatalogDescription(DataOutputStream out, String catalog, 
String comment, String location,
+      int createTime) throws HiveException;
+
+  // ------ Implementations ------
+  static class JsonDescCatalogFormatter extends DescCatalogFormatter {
+    @Override
+    void showCatalogDescription(DataOutputStream out, String catalog, String 
comment, String location,
+        int createTime) throws HiveException {
+      MapBuilder builder = MapBuilder.create()
+          .put("catalog", catalog)
+          .put("comment", comment)
+          .put("location", location);
+      if (createTime != 0) {
+        builder.put("createTime", CalendarUtils.formatTimestamp((long) 
createTime * 1000, true));
+      }
+      ShowUtils.asJson(out, builder.build());
+    }
+  }
+
+  static class TextDescCatalogFormatter extends DescCatalogFormatter {
+    @Override
+    void showCatalogDescription(DataOutputStream out, String catalog, String 
comment, String location,
+        int createTime) throws HiveException {
+      try {
+        writeLine(out, "Catalog Name", catalog);
+        if (comment != null) {
+          writeLine(out, "Comment", HiveStringUtils.escapeJava(comment));
+        }
+        if (location != null) {
+          writeLine(out, "Location", location);
+        }
+        if (createTime != 0) {
+          String createTimeStr = CalendarUtils.formatTimestamp((long) 
createTime * 1000, true);
+          writeLine(out, "CreateTime", createTimeStr);
+        }
+      } catch (IOException e) {
+        throw new HiveException(e);
+      }
+    }
+
+    private void writeLine(DataOutputStream out, String label, String value) 
throws IOException {
+      out.write(label.getBytes(StandardCharsets.UTF_8));
+      out.write(Utilities.tabCode);
+      out.write(value.getBytes(StandardCharsets.UTF_8));
+      out.write(Utilities.newLineCode);
+    }
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java
new file mode 100644
index 00000000000..6509830991b
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/desc/DescCatalogOperation.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.desc;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.ShowUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+import java.io.DataOutputStream;
+
+/**
+ * Operation process of describing a catalog.
+ */
+public class DescCatalogOperation extends DDLOperation<DescCatalogDesc> {
+  public DescCatalogOperation(DDLOperationContext context, DescCatalogDesc 
desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws Exception {
+    try (DataOutputStream outStream = ShowUtils.getOutputStream(new 
Path(desc.getResFile()), context)) {
+      Catalog catalog = context.getDb().getMSC().getCatalog(desc.getCatName());
+      if (catalog == null) {
+        throw new HiveException(ErrorMsg.CATALOG_NOT_EXISTS, 
desc.getCatName());
+      }
+      int createTime = 0;
+      if (desc.isExtended()) {
+        createTime = catalog.getCreateTime();
+      }
+      DescCatalogFormatter formatter = 
DescCatalogFormatter.getFormatter(context.getConf());
+      formatter.showCatalogDescription(outStream, catalog.getName(), 
catalog.getDescription(),
+          catalog.getLocationUri(), createTime);
+    } catch (Exception e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR);
+    }
+    return 0;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java
new file mode 100644
index 00000000000..e4e5d6d7da9
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogAnalyzer.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.drop;
+
+import org.apache.hadoop.hive.metastore.api.Catalog;
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.ReadEntity;
+import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for catalog dropping commands.
+ */
+@DDLType (types = HiveParser.TOK_DROPCATALOG)
+public class DropCatalogAnalyzer extends BaseSemanticAnalyzer {
+  public DropCatalogAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    String catalogName = unescapeIdentifier(root.getChild(0).getText());
+    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != 
null;
+
+    Catalog catalog = getCatalog(catalogName, !ifExists);
+    if (catalog == null) {
+      return;
+    }
+
+    // Drop catalog is non-cascaded, so we do not need to add databases to 
outputs.
+    inputs.add(new ReadEntity(catalog));
+    outputs.add(new WriteEntity(catalog, WriteEntity.WriteType.DDL_NO_LOCK));
+
+    DropCatalogDesc desc = new DropCatalogDesc(catalogName, ifExists);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), 
desc)));
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java
new file mode 100644
index 00000000000..348ae8f84a4
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogDesc.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.drop;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
/**
 * DDL task description for DROP CATALOG commands.
 */
@Explain(displayName = "Drop Catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
public class DropCatalogDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  private final String catalogName;
  private final boolean ifExists;

  /**
   * @param catalogName name of the catalog to drop
   * @param ifExists true when IF EXISTS was specified
   */
  public DropCatalogDesc(String catalogName, boolean ifExists) {
    this.catalogName = catalogName;
    this.ifExists = ifExists;
  }

  @Explain(displayName = "catalog", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
  public String getCatalogName() {
    return catalogName;
  }

  @Explain(displayName = "if exists")
  public boolean getIfExists() {
    return ifExists;
  }
}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java
new file mode 100644
index 00000000000..f6574c0995a
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/drop/DropCatalogOperation.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.drop;
+
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+
+/**
+ * Operation process of dropping a catalog.
+ */
+public class DropCatalogOperation extends DDLOperation<DropCatalogDesc> {
+  public DropCatalogOperation(DDLOperationContext context, DropCatalogDesc 
desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws Exception {
+    String catName = desc.getCatalogName();
+    try {
+      context.getDb().dropCatalog(catName, desc.getIfExists());
+    } catch (NoSuchObjectException e) {
+      throw new HiveException(e, ErrorMsg.CATALOG_NOT_EXISTS, 
desc.getCatalogName());
+    }
+    return 0;
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java
new file mode 100644
index 00000000000..0a1c9ff07a0
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsAnalyzer.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.show;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+
+/**
+ * Analyzer for show catalogs commands.
+ */
[email protected](types = HiveParser.TOK_SHOWCATALOGS)
+public class ShowCatalogsAnalyzer extends BaseSemanticAnalyzer {
+  public ShowCatalogsAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  @Override
+  public void analyzeInternal(ASTNode root) throws SemanticException {
+    if (root.getChildCount() > 1) {
+      throw new SemanticException("Unexpected Tokens at SHOW CATALOGS");
+    }
+
+    ctx.setResFile(ctx.getLocalTmpPath());
+
+    String catalogPattern = root.getChildCount() == 1 ? 
unescapeSQLString(root.getChild(0).getText()) : null;
+    ShowCatalogsDesc desc = new ShowCatalogsDesc(ctx.getResFile(), 
catalogPattern);
+
+    Task<DDLWork> task = TaskFactory.get(new DDLWork(getInputs(), 
getOutputs(), desc));
+    rootTasks.add(task);
+
+    task.setFetchSource(true);
+    setFetchTask(createFetchTask(ShowCatalogsDesc.SHOW_CATALOGS_SCHEMA));
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java
new file mode 100644
index 00000000000..8f4ab47c174
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsDesc.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.show;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+import java.io.Serializable;
+
/**
 * DDL task description for SHOW CATALOGS commands.
 */
@Explain(displayName = "Show Catalogs", explainLevels = { Explain.Level.USER, Explain.Level.DEFAULT, Explain.Level.EXTENDED })
public class ShowCatalogsDesc implements DDLDesc, Serializable {
  private static final long serialVersionUID = 1L;

  /** Result schema: a single string column holding the catalog name. */
  public static final String SHOW_CATALOGS_SCHEMA = "catalog_name#string";

  private final String resFile;
  private final String pattern;

  /**
   * @param resFile local temporary file the results are written to
   * @param pattern optional LIKE pattern to filter catalog names; null means no filtering
   */
  public ShowCatalogsDesc(Path resFile, String pattern) {
    this.resFile = resFile.toString();
    this.pattern = pattern;
  }

  @Explain(displayName = "pattern")
  public String getPattern() {
    return pattern;
  }

  @Explain(displayName = "result file", explainLevels = { Explain.Level.EXTENDED })
  public String getResFile() {
    return resFile;
  }
}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java
new file mode 100644
index 00000000000..9625c6cc635
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsFormatter.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.show;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.ddl.ShowUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.formatting.MapBuilder;
+import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+
+/**
+ * Formats SHOW CATALOGS results.
+ */
+abstract class ShowCatalogsFormatter {
+  public static ShowCatalogsFormatter getFormatter(HiveConf conf) {
+    if (MetaDataFormatUtils.isJson(conf)) {
+      return new JsonShowCatalogsFormatter();
+    } else {
+      return new TextShowCatalogsFormatter();
+    }
+  }
+
+  abstract void showCatalogs(DataOutputStream out, List<String> catalogs) 
throws HiveException;
+
+
+  // ------ Implementations ------
+
+  static class JsonShowCatalogsFormatter extends ShowCatalogsFormatter {
+    @Override
+    void showCatalogs(DataOutputStream out, List<String> catalogs) throws 
HiveException {
+      ShowUtils.asJson(out, MapBuilder.create().put("catalogs", 
catalogs).build());
+    }
+  }
+
+  static class TextShowCatalogsFormatter extends ShowCatalogsFormatter {
+    @Override
+    void showCatalogs(DataOutputStream out, List<String> catalogs) throws 
HiveException {
+      try {
+        for (String catalog : catalogs) {
+          out.write(catalog.getBytes(StandardCharsets.UTF_8));
+          out.write(Utilities.newLineCode);
+        }
+      } catch (IOException e) {
+        throw new HiveException(e);
+      }
+    }
+  }
+
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java
new file mode 100644
index 00000000000..c3011fab332
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/catalog/show/ShowCatalogsOperation.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.catalog.show;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.ShowUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.udf.UDFLike;
+import org.apache.hadoop.io.IOUtils;
+
+import java.io.DataOutputStream;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+/**
+ * Operation process of locking a catalog.
+ */
+public class ShowCatalogsOperation extends DDLOperation<ShowCatalogsDesc> {
+
+  public ShowCatalogsOperation(DDLOperationContext context, ShowCatalogsDesc 
desc) {
+    super(context, desc);
+  }
+
+  @Override public int execute() throws Exception {
+    List<String> catalogs = context.getDb().getMSC().getCatalogs();
+    if (desc.getPattern() != null) {
+      LOG.debug("pattern: {}", desc.getPattern());
+      Pattern pattern = 
Pattern.compile(UDFLike.likePatternToRegExp(desc.getPattern()), 
Pattern.CASE_INSENSITIVE);
+      catalogs = catalogs.stream().filter(name -> 
pattern.matcher(name).matches()).collect(Collectors.toList());
+    }
+
+    LOG.info("Found {} catalog(s) matching the SHOW CATALOGS statement.", 
catalogs.size());
+
+    // write the results in the file
+    DataOutputStream outStream = ShowUtils.getOutputStream(new 
Path(desc.getResFile()), context);
+    try {
+      ShowCatalogsFormatter formatter = 
ShowCatalogsFormatter.getFormatter(context.getConf());
+      formatter.showCatalogs(outStream, catalogs);
+    } catch (Exception e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "show catalogs");
+    } finally {
+      IOUtils.closeStream(outStream);
+    }
+
+    return 0;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
index 72714e6297f..b85988e55a8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Catalog;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -43,9 +44,14 @@ public class Entity implements Serializable {
    * The type of the entity.
    */
   public static enum Type {
-    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION, 
SERVICE_NAME, DATACONNECTOR
+    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION, 
SERVICE_NAME, DATACONNECTOR, CATALOG
   }
 
+  /**
+   * The catalog if this is a catalog.
+   */
+  private Catalog catalog;
+
   /**
    * The database if this is a database.
    */
@@ -116,6 +122,14 @@ public String getName() {
     return name;
   }
 
+  /** @return the catalog object, or null if this entity is not a catalog. */
+  public Catalog getCatalog() {
+    return catalog;
+  }
+
+  /** Sets the catalog object for a catalog entity. */
+  public void setCatalog(Catalog catalog) {
+    this.catalog = catalog;
+  }
+
   public Database getDatabase() {
     return database;
   }
@@ -201,6 +215,13 @@ public Entity() {
     name = null;
   }
 
+  /**
+   * Constructor for a catalog.
+   *
+   * @param catalog the catalog this entity represents
+   * @param complete whether the entity is fully specified
+   */
+  public Entity(Catalog catalog, boolean complete) {
+    this.catalog = catalog;
+    this.typ = Type.CATALOG;
+    this.name = computeName();
+    this.complete = complete;
+  }
+
   /**
    * Constructor for a database.
    *
@@ -443,6 +464,8 @@ private String doComputeName() {
       return stringObject;
     case DATACONNECTOR:
       return "connector:" + connector.getName();
+    case CATALOG:
+      return "catalog:" + catalog.getName();
     default:
       return d.toString();
     }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java
index c0e3bb30054..38c0a1419de 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/HiveProtoLoggingHook.java
@@ -20,6 +20,7 @@
 
 import static org.apache.hadoop.hive.ql.hooks.Entity.Type.PARTITION;
 import static org.apache.hadoop.hive.ql.hooks.Entity.Type.TABLE;
+import static 
org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERCATALOG_LOCATION;
 import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERDATABASE;
 import static org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERDATABASE_OWNER;
 import static 
org.apache.hadoop.hive.ql.plan.HiveOperation.ALTERPARTITION_BUCKETNUM;
@@ -151,7 +152,7 @@ public class HiveProtoLoggingHook implements 
ExecuteWithHookContext {
         RELOADFUNCTION, CREATEMACRO, DROPMACRO, CREATEVIEW, DROPVIEW, 
ALTERVIEW_PROPERTIES,
         LOCKTABLE, UNLOCKTABLE, CREATEROLE, DROPROLE, ALTERTABLE_FILEFORMAT,
         ALTERPARTITION_FILEFORMAT, ALTERTABLE_LOCATION, 
ALTERPARTITION_LOCATION, CREATETABLE,
-        TRUNCATETABLE, CREATETABLE_AS_SELECT, QUERY, ALTERDATABASE, 
ALTERDATABASE_OWNER,
+        TRUNCATETABLE, CREATETABLE_AS_SELECT, QUERY, ALTERCATALOG_LOCATION, 
ALTERDATABASE, ALTERDATABASE_OWNER,
         ALTERTABLE_MERGEFILES, ALTERPARTITION_MERGEFILES, ALTERTABLE_SKEWED,
         ALTERTBLPART_SKEWED_LOCATION, ALTERTABLE_PARTCOLTYPE, 
ALTERTABLE_EXCHANGEPARTITION,
         ALTERTABLE_DROPCONSTRAINT, ALTERTABLE_ADDCONSTRAINT, ALTERVIEW_RENAME, 
ALTERVIEW_AS,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
index 68c139fd471..4c94a5f58ab 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ReadEntity.java
@@ -25,6 +25,7 @@
 import java.util.Set;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.metastore.api.Catalog;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -76,6 +77,13 @@ public ReadEntity() {
     super();
   }
 
+  /**
+   * Constructor for a catalog.
+   */
+  public ReadEntity(Catalog catalog) {
+    super(catalog, true);
+  }
+
   /**
    * Constructor for a database.
    */
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java 
b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
index d3dd2c23504..6563e460970 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
@@ -20,6 +20,7 @@
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Catalog;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.Function;
@@ -65,6 +66,11 @@ public WriteEntity() {
     super();
   }
 
+  /**
+   * Constructor for a catalog.
+   */
+  public WriteEntity(Catalog catalog, WriteType type) {
+    super(catalog, true);
+    setWriteTypeInternal(type);
+  }
+
   public WriteEntity(Database database, WriteType type) {
     super(database, true);
     setWriteTypeInternal(type);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java 
b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 3debb1ee4e4..cf9c84814f5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -29,7 +29,6 @@
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 import static org.apache.hadoop.hive.common.AcidConstants.SOFT_DELETE_TABLE;
-
 import static 
org.apache.hadoop.hive.conf.Constants.MATERIALIZED_VIEW_REWRITING_TIME_WINDOW;
 import static 
org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_LOAD_DYNAMIC_PARTITIONS_SCAN_SPECIFIC_PARTITIONS;
 import static 
org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_WRITE_NOTIFICATION_MAX_BATCH_SIZE;
@@ -144,6 +143,7 @@
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.AllTableConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Catalog;
 import org.apache.hadoop.hive.metastore.api.CheckConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.CmRecycleRequest;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
@@ -151,8 +151,8 @@
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CompactionResponse;
 import org.apache.hadoop.hive.metastore.api.CompactionType;
-import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.DropDatabaseRequest;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
@@ -211,8 +211,8 @@
 import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMTrigger;
 import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
-import org.apache.hadoop.hive.metastore.api.WriteNotificationLogRequest;
 import org.apache.hadoop.hive.metastore.api.WriteNotificationLogBatchRequest;
+import org.apache.hadoop.hive.metastore.api.WriteNotificationLogRequest;
 import org.apache.hadoop.hive.metastore.api.AbortCompactionRequest;
 import org.apache.hadoop.hive.metastore.api.AbortCompactResponse;
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
@@ -624,6 +624,46 @@ public void close(boolean forceClose) {
     }
   }
 
+  /**
+   * Create a catalog in the metastore.
+   * @param catalog the catalog definition to create
+   * @param ifNotExist if true, will ignore AlreadyExistsException exception
+   * @throws AlreadyExistsException if the catalog already exists and ifNotExist is false
+   * @throws HiveException on any other metastore failure
+   */
+  public void createCatalog(Catalog catalog, boolean ifNotExist)
+      throws AlreadyExistsException, HiveException {
+    try {
+      getMSC().createCatalog(catalog);
+    } catch (AlreadyExistsException e) {
+      if (!ifNotExist) {
+        throw e;
+      }
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
+  /**
+   * Drop a catalog.
+   * @param catName name of the catalog to drop
+   * @param ignoreUnknownCat if true, will ignore NoSuchObjectException.
+   * @throws HiveException on any other metastore failure
+   * @throws NoSuchObjectException if the catalog does not exist and ignoreUnknownCat is false
+   */
+  public void dropCatalog(String catName, boolean ignoreUnknownCat)
+      throws HiveException, NoSuchObjectException {
+    try {
+      getMSC().dropCatalog(catName);
+    } catch (NoSuchObjectException e) {
+      if (!ignoreUnknownCat) {
+        throw e;
+      }
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+
   /**
    * Create a database
    * @param db
@@ -2472,6 +2512,30 @@ public void validateDatabaseExists(String databaseName) 
throws SemanticException
     }
   }
 
+  /**
+   * Get the catalog by name.
+   * @param catName catalog name
+   * @return the catalog object, or null if no catalog with that name exists
+   * @throws HiveException on any metastore failure other than a missing catalog
+   */
+  public Catalog getCatalog(String catName) throws HiveException {
+    PerfLogger perfLogger = SessionState.getPerfLogger();
+    perfLogger.perfLogBegin(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG);
+    try {
+      return getMSC().getCatalog(catName);
+    } catch (NoSuchObjectException e) {
+      // A missing catalog is reported as null rather than an exception.
+      return null;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    } finally {
+      perfLogger.perfLogEnd(CLASS_NAME, PerfLogger.HIVE_GET_CATALOG, 
"HS2-cache");
+    }
+  }
+
+  /**
+   * Alter an existing catalog in the metastore.
+   * @param catName name of the catalog to alter
+   * @param catalog the new catalog definition
+   * @throws HiveException if the catalog does not exist or the metastore call fails
+   */
+  public void alterCatalog(String catName, Catalog catalog) throws HiveException {
+    try {
+      getMSC().alterCatalog(catName, catalog);
+    } catch (NoSuchObjectException e) {
+      // Fixed message grammar: "does not exists" -> "does not exist".
+      throw new HiveException("Catalog " + catName + " does not exist.", e);
+    } catch (TException e) {
+      throw new HiveException("Unable to alter catalog " + catName + ". " + e.getMessage(), e);
+    }
+  }
+
   /**
    * Query metadata to see if a database with the given name already exists.
    *
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 238b5aa9668..99a00ffd5fd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -47,6 +47,7 @@
 import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.conf.Constants;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Catalog;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -1869,6 +1870,22 @@ public static Path tryQualifyPath(Path path, HiveConf 
conf) {
     }
   }
 
+  /**
+   * Get the catalog by name, raising a SemanticException if it does not exist.
+   */
+  protected Catalog getCatalog(String catName) throws SemanticException {
+    return getCatalog(catName, true);
+  }
+
+  /**
+   * Get the catalog by name.
+   * @param catName catalog name
+   * @param throwException whether to throw when the catalog does not exist
+   * @return the catalog, or null if it does not exist and throwException is false
+   * @throws SemanticException if the lookup fails, or the catalog is missing and throwException is true
+   */
+  protected Catalog getCatalog(String catName, boolean throwException) throws SemanticException {
+    Catalog catalog;
+    try {
+      catalog = db.getCatalog(catName);
+    } catch (Exception e) {
+      // Wrap only the metastore lookup failure. The missing-catalog check is
+      // done outside the try so its SemanticException is not re-wrapped into
+      // a misleading "Failed to retrieve catalog" message.
+      throw new SemanticException("Failed to retrieve catalog " + catName + ": " + e.getMessage(), e);
+    }
+    if (catalog == null && throwException) {
+      throw new SemanticException(ErrorMsg.CATALOG_NOT_EXISTS.getMsg(catName));
+    }
+    return catalog;
+  }
+
   protected Database getDatabase(String dbName) throws SemanticException {
     return getDatabase(dbName, true);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java 
b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 9c982a36550..a045114720b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -32,6 +32,8 @@ public enum HiveOperation {
   REPLDUMP("REPLDUMP", HiveParser.TOK_REPL_DUMP, new 
Privilege[]{Privilege.ALL}, null),
   REPLLOAD("REPLLOAD", HiveParser.TOK_REPL_LOAD, null, new 
Privilege[]{Privilege.ALL}),
   REPLSTATUS("REPLSTATUS", HiveParser.TOK_REPL_STATUS, new 
Privilege[]{Privilege.SELECT}, null),
+  CREATECATALOG("CREATECATALOG", HiveParser.TOK_CREATECATALOG, null, new 
Privilege[]{Privilege.CREATE}),
+  DROPCATALOG("DROPCATALOG", HiveParser.TOK_DROPCATALOG, null, new 
Privilege[]{Privilege.DROP}),
   CREATEDATABASE("CREATEDATABASE", HiveParser.TOK_CREATEDATABASE, null, new 
Privilege[]{Privilege.CREATE}),
   CREATEDATACONNECTOR("CREATEDATACONNECTOR", 
HiveParser.TOK_CREATEDATACONNECTOR, null, new Privilege[]{Privilege.CREATE}),
   DROPDATABASE("DROPDATABASE", HiveParser.TOK_DROPDATABASE, null, new 
Privilege[]{Privilege.DROP}),
@@ -102,6 +104,8 @@ public enum HiveOperation {
       new Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERPARTITION_BUCKETNUM("ALTERPARTITION_BUCKETNUM", 
HiveParser.TOK_ALTERPARTITION_BUCKETS,
       new Privilege[]{Privilege.ALTER_METADATA}, null),
+  SHOWCATALOGS("SHOWCATALOGS", HiveParser.TOK_SHOWCATALOGS, new 
Privilege[]{Privilege.SHOW_CATALOG}, null, true,
+      false),
   SHOWDATABASES("SHOWDATABASES", HiveParser.TOK_SHOWDATABASES, new 
Privilege[]{Privilege.SHOW_DATABASE}, null, true,
       false),
   SHOWDATACONNECTORS("SHOWDATACONNECTORS", HiveParser.TOK_SHOWDATACONNECTORS, 
new Privilege[]{Privilege.SHOW_DATABASE}, null, true,
@@ -165,11 +169,13 @@ public enum HiveOperation {
       new Privilege[]{Privilege.CREATE}),
   QUERY("QUERY", HiveParser.TOK_QUERY, new Privilege[]{Privilege.SELECT},
       new Privilege[]{Privilege.ALTER_DATA, Privilege.CREATE}, true, false),
+  ALTERCATALOG_LOCATION("ALTERCATALOG_LOCATION", 
HiveParser.TOK_ALTERCATALOG_LOCATION, new 
Privilege[]{Privilege.ALTER_METADATA}, null),
   ALTERDATABASE("ALTERDATABASE", HiveParser.TOK_ALTERDATABASE_PROPERTIES, 
null, null),
   ALTERDATABASE_OWNER("ALTERDATABASE_OWNER", 
HiveParser.TOK_ALTERDATABASE_OWNER, null, null),
   ALTERDATABASE_LOCATION("ALTERDATABASE_LOCATION",
       new int[] {HiveParser.TOK_ALTERDATABASE_LOCATION, 
HiveParser.TOK_ALTERDATABASE_MANAGEDLOCATION},
       new Privilege[]{Privilege.ALTER_DATA}, null),
+  DESCCATALOG("DESCCATALOG", HiveParser.TOK_DESCCATALOG, null, null),
   DESCDATABASE("DESCDATABASE", HiveParser.TOK_DESCDATABASE, null, null),
   ALTERDATACONNECTOR("ALTERDATACONNECTOR", 
HiveParser.TOK_ALTERDATACONNECTOR_PROPERTIES, null, null),
   ALTERDATACONNECTOR_OWNER("ALTERDATABASE_OWNER", 
HiveParser.TOK_ALTERDATACONNECTOR_OWNER, null, null),
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
index 9ce8b0754bd..4b88fab806e 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
@@ -117,6 +117,9 @@ public Privilege() {
   public static Privilege DELETE = new Privilege(PrivilegeType.DELETE,
       PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN);
 
+  public static Privilege SHOW_CATALOG = new Privilege(PrivilegeType.SELECT,
+      EnumSet.of(PrivilegeScope.USER_LEVEL_SCOPE));
+
   public static Privilege SHOW_DATABASE = new 
Privilege(PrivilegeType.SHOW_DATABASE,
       EnumSet.of(PrivilegeScope.USER_LEVEL_SCOPE));
 
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
index 2578c570787..0fbcf6d22aa 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
@@ -33,8 +33,10 @@ public enum HiveOperationType {
   REPLDUMP,
   REPLLOAD,
   REPLSTATUS,
+  CREATECATALOG,
   CREATEDATABASE,
   CREATEDATACONNECTOR,
+  DROPCATALOG,
   DROPDATABASE,
   DROPDATACONNECTOR,
   SWITCHDATABASE,
@@ -75,6 +77,7 @@ public enum HiveOperationType {
   ALTERPARTITION_BUCKETNUM,
   ALTERTABLE_UPDATETABLESTATS,
   ALTERTABLE_UPDATEPARTSTATS,
+  SHOWCATALOGS,
   SHOWDATABASES,
   SHOWDATACONNECTORS,
   SHOWTABLES,
@@ -121,12 +124,14 @@ public enum HiveOperationType {
   TRUNCATETABLE,
   CREATETABLE_AS_SELECT,
   QUERY,
+  ALTERCATALOG_LOCATION,
   ALTERDATABASE,
   ALTERDATABASE_OWNER,
   ALTERDATABASE_LOCATION,
   ALTERDATACONNECTOR,
   ALTERDATACONNECTOR_OWNER,
   ALTERDATACONNECTOR_URL,
+  DESCCATALOG,
   DESCDATABASE,
   DESCDATACONNECTOR,
   ALTERTABLE_MERGEFILES,
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
index 483be4f73d1..b727a3a6d25 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
@@ -470,6 +470,15 @@ public HivePrivilegeObjectType getObjectType() {
     op2Priv.put(HiveOperationType.ALTERDATACONNECTOR_URL, 
PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR));
     op2Priv.put(HiveOperationType.DESCDATACONNECTOR, 
PrivRequirement.newIOPrivRequirement(null, null));
     op2Priv.put(HiveOperationType.SHOWDATACONNECTORS, 
PrivRequirement.newIOPrivRequirement(null, null));
+
+    op2Priv.put(HiveOperationType.CREATECATALOG, 
PrivRequirement.newPrivRequirementList(
+        new PrivRequirement(INS_SEL_DEL_NOGRANT_AR, 
HivePrivilegeObjectType.DFS_URI),
+        new PrivRequirement(INS_SEL_DEL_NOGRANT_AR, 
HivePrivilegeObjectType.LOCAL_URI),
+        new PrivRequirement(ADMIN_PRIV_AR, IOType.OUTPUT)));
+    op2Priv.put(HiveOperationType.DROPCATALOG, 
PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.ALTERCATALOG_LOCATION, 
PrivRequirement.newIOPrivRequirement(null, ADMIN_PRIV_AR));
+    op2Priv.put(HiveOperationType.DESCCATALOG, 
PrivRequirement.newIOPrivRequirement(null, null));
+    op2Priv.put(HiveOperationType.SHOWCATALOGS, 
PrivRequirement.newIOPrivRequirement(null, null));
   }
 
   /**
diff --git a/ql/src/test/queries/clientpositive/catalog.q 
b/ql/src/test/queries/clientpositive/catalog.q
new file mode 100644
index 00000000000..173e9a065da
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/catalog.q
@@ -0,0 +1,47 @@
+set hive.mapred.mode=nonstrict;
+set hive.support.concurrency = true;
+
+-- SORT_QUERY_RESULTS
+SHOW CATALOGS;
+
+-- CREATE with comment
+CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog';
+
+-- DESCRIBE
+DESC CATALOG test_cat;
+
+-- CREATE INE already exists
+CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat';
+SHOW CATALOGS;
+
+-- DROP
+DROP CATALOG test_cat;
+SHOW CATALOGS;
+
+-- CREATE INE doesn't exist
+CREATE CATALOG IF NOT EXISTS test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive 
test catalog';
+SHOW CATALOGS;
+
+-- DROP IE exists
+DROP CATALOG IF EXISTS test_cat;
+SHOW CATALOGS;
+
+-- DROP IE doesn't exist
+DROP CATALOG IF EXISTS test_cat;
+
+-- SHOW
+CREATE CATALOG test_cat LOCATION '/tmp/test_cat' COMMENT 'Hive test catalog';
+SHOW CATALOGS;
+
+-- SHOW pattern
+SHOW CATALOGS LIKE 'test%';
+
+-- SHOW pattern
+SHOW CATALOGS LIKE 'test_';
+
+-- SHOW pattern
+SHOW CATALOGS LIKE 'test__';
+
+-- ALTER LOCATION
+ALTER CATALOG test_cat SET LOCATION '/tmp/test_cat_new';
+DESC CATALOG EXTENDED test_cat;
diff --git a/ql/src/test/results/clientpositive/llap/catalog.q.out 
b/ql/src/test/results/clientpositive/llap/catalog.q.out
new file mode 100644
index 00000000000..6f9ef138dcd
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/catalog.q.out
@@ -0,0 +1,118 @@
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+PREHOOK: query: DESC CATALOG test_cat
+PREHOOK: type: DESCCATALOG
+PREHOOK: Input: catalog:test_cat
+POSTHOOK: query: DESC CATALOG test_cat
+POSTHOOK: type: DESCCATALOG
+POSTHOOK: Input: catalog:test_cat
+Catalog Name   test_cat         
+Comment        Hive test catalog        
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+test_cat
+PREHOOK: query: DROP CATALOG test_cat
+PREHOOK: type: DROPCATALOG
+PREHOOK: Input: catalog:test_cat
+PREHOOK: Output: catalog:test_cat
+POSTHOOK: query: DROP CATALOG test_cat
+POSTHOOK: type: DROPCATALOG
+POSTHOOK: Input: catalog:test_cat
+POSTHOOK: Output: catalog:test_cat
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+test_cat
+PREHOOK: query: DROP CATALOG IF EXISTS test_cat
+PREHOOK: type: DROPCATALOG
+PREHOOK: Input: catalog:test_cat
+PREHOOK: Output: catalog:test_cat
+POSTHOOK: query: DROP CATALOG IF EXISTS test_cat
+POSTHOOK: type: DROPCATALOG
+POSTHOOK: Input: catalog:test_cat
+POSTHOOK: Output: catalog:test_cat
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+PREHOOK: query: DROP CATALOG IF EXISTS test_cat
+PREHOOK: type: DROPCATALOG
+POSTHOOK: query: DROP CATALOG IF EXISTS test_cat
+POSTHOOK: type: DROPCATALOG
+#### A masked pattern was here ####
+PREHOOK: type: CREATECATALOG
+PREHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+POSTHOOK: type: CREATECATALOG
+POSTHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+PREHOOK: query: SHOW CATALOGS
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS
+POSTHOOK: type: SHOWCATALOGS
+hive
+test_cat
+PREHOOK: query: SHOW CATALOGS LIKE 'test%'
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS LIKE 'test%'
+POSTHOOK: type: SHOWCATALOGS
+test_cat
+PREHOOK: query: SHOW CATALOGS LIKE 'test_'
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS LIKE 'test_'
+POSTHOOK: type: SHOWCATALOGS
+PREHOOK: query: SHOW CATALOGS LIKE 'test__'
+PREHOOK: type: SHOWCATALOGS
+POSTHOOK: query: SHOW CATALOGS LIKE 'test__'
+POSTHOOK: type: SHOWCATALOGS
+#### A masked pattern was here ####
+PREHOOK: type: ALTERCATALOG_LOCATION
+PREHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+POSTHOOK: type: ALTERCATALOG_LOCATION
+POSTHOOK: Output: catalog:test_cat
+#### A masked pattern was here ####
+PREHOOK: query: DESC CATALOG EXTENDED test_cat
+PREHOOK: type: DESCCATALOG
+PREHOOK: Input: catalog:test_cat
+POSTHOOK: query: DESC CATALOG EXTENDED test_cat
+POSTHOOK: type: DESCCATALOG
+POSTHOOK: Input: catalog:test_cat
+Catalog Name   test_cat                 
+Comment        Hive test catalog                
+#### A masked pattern was here ####

Reply via email to