This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 2dd6a38  [SPARK-31113][SQL] Add SHOW VIEWS command
2dd6a38 is described below

commit 2dd6a38725b86b0cf248625ec213a0ea8e4e5815
Author: Eric Wu <492960...@qq.com>
AuthorDate: Tue Apr 7 09:25:01 2020 -0700

    [SPARK-31113][SQL] Add SHOW VIEWS command
    
    ### What changes were proposed in this pull request?
    Previously, users could issue `SHOW TABLES` to get info on both tables and 
views.
    This PR (SPARK-31113) implements the `SHOW VIEWS` SQL command, similar to Hive, 
to get views 
only. (https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-ShowViews)
    
    **Hive** -- Only show view names
    ```
    hive> SHOW VIEWS;
    OK
    view_1
    view_2
    ...
    ```
    
    **Spark (Hive-Compatible)** -- Only show view names, used in tests and 
`SparkSQLDriver` for CLI applications
    ```
    SHOW VIEWS IN showdb;
    view_1
    view_2
    ...
    ```
    
    **Spark** -- Show more information: database/viewName/isTemporary
    ```
    spark-sql> SHOW VIEWS;
    userdb      view_1  false
    userdb      view_2  false
    ...
    ```
    
    ### Why are the changes needed?
    The `SHOW VIEWS` command provides better granularity by returning information about 
views only.
    
    ### Does this PR introduce any user-facing change?
    Add new `SHOW VIEWS` SQL command
    
    ### How was this patch tested?
    Add new test `show-views.sql` and pass existing tests
    
    Closes #27897 from Eric5553/ShowViews.
    
    Authored-by: Eric Wu <492960...@qq.com>
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
    (cherry picked from commit a28ed86a387b286745b30cd4d90b3d558205a5a7)
    Signed-off-by: Dongjoon Hyun <dongj...@apache.org>
---
 docs/_data/menu-sql.yaml                           |   2 +
 docs/sql-ref-ansi-compliance.md                    |   1 +
 docs/sql-ref-syntax-aux-show-tables.md             |   2 +-
 docs/sql-ref-syntax-aux-show-views.md              | 121 +++++++++++++
 docs/sql-ref-syntax-aux-show.md                    |   1 +
 docs/sql-ref-syntax-ddl-alter-view.md              |   1 +
 docs/sql-ref-syntax-ddl-create-view.md             |   1 +
 docs/sql-ref-syntax-ddl-drop-view.md               |   1 +
 docs/sql-ref-syntax.md                             |   1 +
 .../apache/spark/sql/catalyst/parser/SqlBase.g4    |   7 +-
 .../spark/sql/catalyst/analysis/Analyzer.scala     |   2 +
 .../sql/catalyst/catalog/ExternalCatalog.scala     |   2 +
 .../catalog/ExternalCatalogWithListener.scala      |   4 +
 .../sql/catalyst/catalog/InMemoryCatalog.scala     |   6 +
 .../sql/catalyst/catalog/SessionCatalog.scala      |  19 ++
 .../spark/sql/catalyst/parser/AstBuilder.scala     |  10 +
 .../sql/catalyst/plans/logical/v2Commands.scala    |  16 ++
 .../spark/sql/catalyst/parser/DDLParserSuite.scala |  37 +++-
 .../parser/TableIdentifierParserSuite.scala        |   2 +
 .../catalyst/analysis/ResolveSessionCatalog.scala  |  14 ++
 .../apache/spark/sql/execution/HiveResult.scala    |   6 +-
 .../apache/spark/sql/execution/command/views.scala |  39 +++-
 .../resources/sql-tests/inputs/show-tables.sql     |   1 +
 .../test/resources/sql-tests/inputs/show-views.sql |  28 +++
 .../sql-tests/results/show-tables.sql.out          |  12 +-
 .../resources/sql-tests/results/show-views.sql.out | 201 +++++++++++++++++++++
 .../spark/sql/connector/DataSourceV2SQLSuite.scala |  17 ++
 .../spark/sql/hive/HiveExternalCatalog.scala       |   5 +
 .../spark/sql/hive/client/VersionsSuite.scala      |  10 +-
 .../sql/hive/execution/HiveCommandSuite.scala      |  46 +++++
 30 files changed, 605 insertions(+), 10 deletions(-)

diff --git a/docs/_data/menu-sql.yaml b/docs/_data/menu-sql.yaml
index 787ee3e..ffc3b3c 100644
--- a/docs/_data/menu-sql.yaml
+++ b/docs/_data/menu-sql.yaml
@@ -209,6 +209,8 @@
                   url: sql-ref-syntax-aux-show-partitions.html
                 - text: SHOW CREATE TABLE
                   url: sql-ref-syntax-aux-show-create-table.html
+                - text: SHOW VIEWS
+                  url: sql-ref-syntax-aux-show-views.html
             - text: CONFIGURATION MANAGEMENT
               url: sql-ref-syntax-aux-conf-mgmt.html
               subitems:
diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index 83affb9..7543180 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -411,6 +411,7 @@ Below is a list of all the keywords in Spark SQL.
   
<tr><td>USING</td><td>reserved</td><td>strict-non-reserved</td><td>reserved</td></tr>
   
<tr><td>VALUES</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr>
   
<tr><td>VIEW</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr>
+  
<tr><td>VIEWS</td><td>non-reserved</td><td>non-reserved</td><td>non-reserved</td></tr>
   <tr><td>WHEN</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr>
   
<tr><td>WHERE</td><td>reserved</td><td>non-reserved</td><td>reserved</td></tr>
   
<tr><td>WINDOW</td><td>non-reserved</td><td>non-reserved</td><td>reserved</td></tr>
diff --git a/docs/sql-ref-syntax-aux-show-tables.md 
b/docs/sql-ref-syntax-aux-show-tables.md
index 2a078ab..311401c 100644
--- a/docs/sql-ref-syntax-aux-show-tables.md
+++ b/docs/sql-ref-syntax-aux-show-tables.md
@@ -89,7 +89,7 @@ SHOW TABLES FROM default LIKE 'sam*';
   +-----------+------------+--------------+--+
   
 -- List all tables matching the pattern `sam*|suj`
-SHOW TABLES  LIKE 'sam*|suj';
+SHOW TABLES LIKE 'sam*|suj';
   +-----------+------------+--------------+--+
   | database  | tableName  | isTemporary  |
   +-----------+------------+--------------+--+
diff --git a/docs/sql-ref-syntax-aux-show-views.md 
b/docs/sql-ref-syntax-aux-show-views.md
new file mode 100644
index 0000000..a5e840d
--- /dev/null
+++ b/docs/sql-ref-syntax-aux-show-views.md
@@ -0,0 +1,121 @@
+---
+layout: global
+title: SHOW VIEWS
+displayTitle: SHOW VIEWS
+license: |
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+ 
+     http://www.apache.org/licenses/LICENSE-2.0
+ 
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+---
+### Description
+
+The `SHOW VIEWS` statement returns all the views for an optionally specified 
database.
+Additionally, the output of this statement may be filtered by an optional 
matching
+pattern. If no database is specified then the views are returned from the 
+current database. If the specified database is global temporary view database, 
we will
+list global temporary views. Note that the command also lists local temporary 
views 
+regardless of a given database.
+
+### Syntax
+{% highlight sql %}
+SHOW VIEWS [ { FROM | IN } database_name ] [ LIKE 'regex_pattern' ]
+{% endhighlight %}
+
+### Parameters
+<dl>
+  <dt><code><em>{ FROM | IN } database_name</em></code></dt>
+  <dd>
+     Specifies the database name from which views are listed.
+  </dd>
+  <dt><code><em>LIKE regex_pattern</em></code></dt>
+  <dd>
+     Specifies the regular expression pattern that is used to filter out 
unwanted views. 
+     <ul> 
+          <li> Except for `*` and `|` character, the pattern works like a 
regex.</li>
+          <li> `*` alone matches 0 or more characters and `|` is used to 
separate multiple different regexes,
+           any of which can match. </li>
+          <li> The leading and trailing blanks are trimmed in the input 
pattern before processing.</li>
+     </ul>
+  </dd>
+</dl>
+
+### Example
+{% highlight sql %}
+-- Create views in different databases, also create global/local temp views.
+CREATE VIEW sam AS SELECT id, salary FROM employee WHERE name = 'sam';
+CREATE VIEW sam1 AS SELECT id, salary FROM employee WHERE name = 'sam1';
+CREATE VIEW suj AS SELECT id, salary FROM employee WHERE name = 'suj';
+USE userdb;
+CREATE VIEW user1 AS SELECT id, salary FROM default.employee WHERE name = 
'user1';
+CREATE VIEW user2 AS SELECT id, salary FROM default.employee WHERE name = 
'user2';
+USE default;
+CREATE GLOBAL TEMP VIEW temp1 AS SELECT 1 as col1;
+CREATE TEMP VIEW temp2 AS SELECT 1 as col1;
+
+-- List all views in default database
+SHOW VIEWS;
+  +-------------+------------+--------------+--+
+  | namespace   | viewName   | isTemporary  |
+  +-------------+------------+--------------+--+
+  | default     | sam        | false        |
+  | default     | sam1       | false        |
+  | default     | suj        | false        |
+  |             | temp2      | true         |
+  +-------------+------------+--------------+--+
+
+-- List all views from userdb database 
+SHOW VIEWS FROM userdb;
+  +-------------+------------+--------------+--+
+  | namespace   | viewName   | isTemporary  |
+  +-------------+------------+--------------+--+
+  | userdb      | user1      | false        |
+  | userdb      | user2      | false        |
+  |             | temp2      | true         |
+  +-------------+------------+--------------+--+
+  
+-- List all views in global temp view database 
+SHOW VIEWS IN global_temp;
+  +-------------+------------+--------------+--+
+  | namespace   | viewName   | isTemporary  |
+  +-------------+------------+--------------+--+
+  | global_temp | temp1      | true         |
+  |             | temp2      | true         |
+  +-------------+------------+--------------+--+
+
+-- List all views from default database matching the pattern `sam*`
+SHOW VIEWS FROM default LIKE 'sam*';
+  +-----------+------------+--------------+--+
+  | namespace | viewName   | isTemporary  |
+  +-----------+------------+--------------+--+
+  | default   | sam        | false        |
+  | default   | sam1       | false        |
+  +-----------+------------+--------------+--+
+
+-- List all views from the current database matching the pattern 
`sam|suj|temp*`
+SHOW VIEWS LIKE 'sam|suj|temp*';
+  +-------------+------------+--------------+--+
+  | namespace   | viewName   | isTemporary  |
+  +-------------+------------+--------------+--+
+  | default     | sam        | false        |
+  | default     | suj        | false        |
+  |             | temp2      | true         |
+  +-------------+------------+--------------+--+
+
+{% endhighlight %}
+
+### Related statements
+- [CREATE VIEW](sql-ref-syntax-ddl-create-view.html)
+- [DROP VIEW](sql-ref-syntax-ddl-drop-view.html)
+- [CREATE DATABASE](sql-ref-syntax-ddl-create-database.html)
+- [DROP DATABASE](sql-ref-syntax-ddl-drop-database.html)
diff --git a/docs/sql-ref-syntax-aux-show.md b/docs/sql-ref-syntax-aux-show.md
index f6d700a..dd56d46 100644
--- a/docs/sql-ref-syntax-aux-show.md
+++ b/docs/sql-ref-syntax-aux-show.md
@@ -26,3 +26,4 @@ license: |
 * [SHOW TBLPROPERTIES](sql-ref-syntax-aux-show-tblproperties.html)
 * [SHOW PARTITIONS](sql-ref-syntax-aux-show-partitions.html)
 * [SHOW CREATE TABLE](sql-ref-syntax-aux-show-create-table.html)
+* [SHOW VIEWS](sql-ref-syntax-aux-show-views.html)
diff --git a/docs/sql-ref-syntax-ddl-alter-view.md 
b/docs/sql-ref-syntax-ddl-alter-view.md
index d42d040..a29f2b4 100644
--- a/docs/sql-ref-syntax-ddl-alter-view.md
+++ b/docs/sql-ref-syntax-ddl-alter-view.md
@@ -228,6 +228,7 @@ DESC TABLE EXTENDED tempdb1.v2;
 - [describe-table](sql-ref-syntax-aux-describe-table.html)
 - [create-view](sql-ref-syntax-ddl-create-view.html)
 - [drop-view](sql-ref-syntax-ddl-drop-view.html)
+- [show-views](sql-ref-syntax-aux-show-views.html)
 
 #### Note:
 
diff --git a/docs/sql-ref-syntax-ddl-create-view.md 
b/docs/sql-ref-syntax-ddl-create-view.md
index 67060d7..53b4027 100644
--- a/docs/sql-ref-syntax-ddl-create-view.md
+++ b/docs/sql-ref-syntax-ddl-create-view.md
@@ -89,3 +89,4 @@ CREATE GLOBAL TEMPORARY VIEW IF NOT EXISTS subscribed_movies
 ### Related Statements
 - [ALTER VIEW](sql-ref-syntax-ddl-alter-view.html)
 - [DROP VIEW](sql-ref-syntax-ddl-drop-view.html)
+- [SHOW VIEWS](sql-ref-syntax-aux-show-views.html)
diff --git a/docs/sql-ref-syntax-ddl-drop-view.md 
b/docs/sql-ref-syntax-ddl-drop-view.md
index bf9e497b..f313995 100644
--- a/docs/sql-ref-syntax-ddl-drop-view.md
+++ b/docs/sql-ref-syntax-ddl-drop-view.md
@@ -78,5 +78,6 @@ DROP VIEW IF EXISTS employeeView;
 ### Related Statements
 - [CREATE VIEW](sql-ref-syntax-ddl-create-view.html)
 - [ALTER VIEW](sql-ref-syntax-ddl-alter-view.html)
+- [SHOW VIEWS](sql-ref-syntax-aux-show-views.html)
 - [CREATE DATABASE](sql-ref-syntax-ddl-create-database.html)
 - [DROP DATABASE](sql-ref-syntax-ddl-drop-database.html)
diff --git a/docs/sql-ref-syntax.md b/docs/sql-ref-syntax.md
index 3db97ac..dd611d7 100644
--- a/docs/sql-ref-syntax.md
+++ b/docs/sql-ref-syntax.md
@@ -78,5 +78,6 @@ Spark SQL is Apache Spark's module for working with 
structured data. The SQL Syn
 - [SHOW TABLE EXTENDED](sql-ref-syntax-aux-show-table.html)
 - [SHOW TABLES](sql-ref-syntax-aux-show-tables.html)
 - [SHOW TBLPROPERTIES](sql-ref-syntax-aux-show-tblproperties.html)
+- [SHOW VIEWS](sql-ref-syntax-aux-show-views.html)
 - [UNCACHE TABLE](sql-ref-syntax-aux-cache-uncache-table.html)
 - [UNSET](sql-ref-syntax-aux-conf-mgmt-reset.html)
diff --git 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 143a567..39f2942 100644
--- 
a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ 
b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -158,7 +158,7 @@ statement
         SET TBLPROPERTIES tablePropertyList                            
#setTableProperties
     | ALTER (TABLE | VIEW) multipartIdentifier
         UNSET TBLPROPERTIES (IF EXISTS)? tablePropertyList             
#unsetTableProperties
-    |ALTER TABLE table=multipartIdentifier
+    | ALTER TABLE table=multipartIdentifier
         (ALTER | CHANGE) COLUMN? column=multipartIdentifier
         alterColumnAction?                                             
#alterTableAlterColumn
     | ALTER TABLE table=multipartIdentifier partitionSpec?
@@ -204,6 +204,8 @@ statement
         ('(' key=tablePropertyKey ')')?                                
#showTblProperties
     | SHOW COLUMNS (FROM | IN) table=multipartIdentifier
         ((FROM | IN) ns=multipartIdentifier)?                          
#showColumns
+    | SHOW VIEWS ((FROM | IN) multipartIdentifier)?
+        (LIKE? pattern=STRING)?                                        
#showViews
     | SHOW PARTITIONS multipartIdentifier partitionSpec?               
#showPartitions
     | SHOW identifier? FUNCTIONS
         (LIKE? (multipartIdentifier | pattern=STRING))?                
#showFunctions
@@ -1171,6 +1173,7 @@ ansiNonReserved
     | USE
     | VALUES
     | VIEW
+    | VIEWS
     | WINDOW
     ;
 
@@ -1434,6 +1437,7 @@ nonReserved
     | USER
     | VALUES
     | VIEW
+    | VIEWS
     | WHEN
     | WHERE
     | WINDOW
@@ -1694,6 +1698,7 @@ USER: 'USER';
 USING: 'USING';
 VALUES: 'VALUES';
 VIEW: 'VIEW';
+VIEWS: 'VIEWS';
 WHEN: 'WHEN';
 WHERE: 'WHERE';
 WINDOW: 'WINDOW';
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 67f6f49..e4be634 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -743,6 +743,8 @@ class Analyzer(
     def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
       case s @ ShowTables(UnresolvedNamespace(Seq()), _) =>
         s.copy(namespace = ResolvedNamespace(currentCatalog, 
catalogManager.currentNamespace))
+      case s @ ShowViews(UnresolvedNamespace(Seq()), _) =>
+        s.copy(namespace = ResolvedNamespace(currentCatalog, 
catalogManager.currentNamespace))
       case UnresolvedNamespace(Seq()) =>
         ResolvedNamespace(currentCatalog, Seq.empty[String])
       case UnresolvedNamespace(CatalogAndNamespace(catalog, ns)) =>
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
index dcc1439..db930cf 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala
@@ -136,6 +136,8 @@ trait ExternalCatalog {
 
   def listTables(db: String, pattern: String): Seq[String]
 
+  def listViews(db: String, pattern: String): Seq[String]
+
   /**
    * Loads data into a table.
    *
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogWithListener.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogWithListener.scala
index 86113d3..c2613ff 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogWithListener.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogWithListener.scala
@@ -154,6 +154,10 @@ class ExternalCatalogWithListener(delegate: 
ExternalCatalog)
     delegate.listTables(db, pattern)
   }
 
+  override def listViews(db: String, pattern: String): Seq[String] = {
+    delegate.listViews(db, pattern)
+  }
+
   override def loadTable(
       db: String,
       table: String,
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index abf6993..31644a5 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -346,6 +346,12 @@ class InMemoryCatalog(
     StringUtils.filterPattern(listTables(db), pattern)
   }
 
+  override def listViews(db: String, pattern: String): Seq[String] = 
synchronized {
+    requireDbExists(db)
+    val views = catalog(db).tables.filter(_._2.table.tableType == 
CatalogTableType.VIEW).keySet
+    StringUtils.filterPattern(views.toSeq.sorted, pattern)
+  }
+
   override def loadTable(
       db: String,
       table: String,
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 3a63aff..b79857c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -881,6 +881,25 @@ class SessionCatalog(
   }
 
   /**
+   * List all matching views in the specified database, including local 
temporary views.
+   */
+  def listViews(db: String, pattern: String): Seq[TableIdentifier] = {
+    val dbName = formatDatabaseName(db)
+    val dbViews = if (dbName == globalTempViewManager.database) {
+      globalTempViewManager.listViewNames(pattern).map { name =>
+        TableIdentifier(name, Some(globalTempViewManager.database))
+      }
+    } else {
+      requireDbExists(dbName)
+      externalCatalog.listViews(dbName, pattern).map { name =>
+        TableIdentifier(name, Some(dbName))
+      }
+    }
+
+    dbViews ++ listLocalTempViews(pattern)
+  }
+
+  /**
    * List all matching local temporary views.
    */
   def listLocalTempViews(pattern: String): Seq[TableIdentifier] = {
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index cd4c895..b2ef96d 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2897,6 +2897,16 @@ class AstBuilder(conf: SQLConf) extends 
SqlBaseBaseVisitor[AnyRef] with Logging
       Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec))
   }
 
+  /**
+   * Create a [[ShowViews]] command.
+   */
+  override def visitShowViews(ctx: ShowViewsContext): LogicalPlan = 
withOrigin(ctx) {
+    val multiPart = 
Option(ctx.multipartIdentifier).map(visitMultipartIdentifier)
+    ShowViews(
+      UnresolvedNamespace(multiPart.getOrElse(Seq.empty[String])),
+      Option(ctx.pattern).map(string))
+  }
+
   override def visitColPosition(ctx: ColPositionContext): ColumnPosition = {
     ctx.position.getType match {
       case SqlBaseParser.FIRST => ColumnPosition.first()
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index c04e563..579157a 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -437,6 +437,22 @@ case class ShowTables(
 }
 
 /**
+ * The logical plan of the SHOW VIEWS command that works for v1 and v2 
catalogs.
+ *
+ * Notes: v2 catalogs do not support views API yet, the command will fallback 
to
+ * v1 ShowViewsCommand during ResolveSessionCatalog.
+ */
+case class ShowViews(
+    namespace: LogicalPlan,
+    pattern: Option[String]) extends Command {
+  override def children: Seq[LogicalPlan] = Seq(namespace)
+
+  override val output: Seq[Attribute] = Seq(
+    AttributeReference("namespace", StringType, nullable = false)(),
+    AttributeReference("viewName", StringType, nullable = false)())
+}
+
+/**
  * The logical plan of the USE/USE NAMESPACE command that works for v2 
catalogs.
  */
 case class SetCatalogAndNamespace(
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 543ea5d..1bef638 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1180,14 +1180,23 @@ class DDLParserSuite extends AnalysisTest {
       parsePlan("SHOW TABLES"),
       ShowTables(UnresolvedNamespace(Seq.empty[String]), None))
     comparePlans(
+      parsePlan("SHOW TABLES '*test*'"),
+      ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*")))
+    comparePlans(
+      parsePlan("SHOW TABLES LIKE '*test*'"),
+      ShowTables(UnresolvedNamespace(Seq.empty[String]), Some("*test*")))
+    comparePlans(
       parsePlan("SHOW TABLES FROM testcat.ns1.ns2.tbl"),
       ShowTables(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), 
None))
     comparePlans(
       parsePlan("SHOW TABLES IN testcat.ns1.ns2.tbl"),
       ShowTables(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), 
None))
     comparePlans(
-      parsePlan("SHOW TABLES IN tbl LIKE '*dog*'"),
-      ShowTables(UnresolvedNamespace(Seq("tbl")), Some("*dog*")))
+      parsePlan("SHOW TABLES IN ns1 '*test*'"),
+      ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*")))
+    comparePlans(
+      parsePlan("SHOW TABLES IN ns1 LIKE '*test*'"),
+      ShowTables(UnresolvedNamespace(Seq("ns1")), Some("*test*")))
   }
 
   test("show table extended") {
@@ -1215,6 +1224,30 @@ class DDLParserSuite extends AnalysisTest {
         Some(Map("ds" -> "2008-04-09"))))
   }
 
+  test("show views") {
+    comparePlans(
+      parsePlan("SHOW VIEWS"),
+      ShowViews(UnresolvedNamespace(Seq.empty[String]), None))
+    comparePlans(
+      parsePlan("SHOW VIEWS '*test*'"),
+      ShowViews(UnresolvedNamespace(Seq.empty[String]), Some("*test*")))
+    comparePlans(
+      parsePlan("SHOW VIEWS LIKE '*test*'"),
+      ShowViews(UnresolvedNamespace(Seq.empty[String]), Some("*test*")))
+    comparePlans(
+      parsePlan("SHOW VIEWS FROM testcat.ns1.ns2.tbl"),
+      ShowViews(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), 
None))
+    comparePlans(
+      parsePlan("SHOW VIEWS IN testcat.ns1.ns2.tbl"),
+      ShowViews(UnresolvedNamespace(Seq("testcat", "ns1", "ns2", "tbl")), 
None))
+    comparePlans(
+      parsePlan("SHOW VIEWS IN ns1 '*test*'"),
+      ShowViews(UnresolvedNamespace(Seq("ns1")), Some("*test*")))
+    comparePlans(
+      parsePlan("SHOW VIEWS IN ns1 LIKE '*test*'"),
+      ShowViews(UnresolvedNamespace(Seq("ns1")), Some("*test*")))
+  }
+
   test("create namespace -- backward compatibility with 
DATABASE/DBPROPERTIES") {
     val expected = CreateNamespaceStatement(
       Seq("a", "b", "c"),
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
index 053d578..d5b0885 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala
@@ -254,6 +254,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with 
SQLHelper {
     "utctimestamp",
     "values",
     "view",
+    "views",
     "while",
     "with",
     "work",
@@ -525,6 +526,7 @@ class TableIdentifierParserSuite extends SparkFunSuite with 
SQLHelper {
     "using",
     "values",
     "view",
+    "views",
     "when",
     "where",
     "window",
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 77c5701..58d74e0 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -521,6 +521,20 @@ class ResolveSessionCatalog(
         replace,
         viewType)
 
+    case ShowViews(resolved: ResolvedNamespace, pattern) =>
+      resolved match {
+        case SessionCatalogAndNamespace(_, ns) =>
+          // Fallback to v1 ShowViewsCommand since there is no view API in v2 
catalog
+          assert(ns.nonEmpty)
+          if (ns.length != 1) {
+            throw new AnalysisException(s"The database name is not valid: 
${ns.quoted}")
+          }
+          ShowViewsCommand(ns.head, pattern)
+        case _ =>
+          throw new AnalysisException(s"Catalog ${resolved.catalog.name} 
doesn't support " +
+            "SHOW VIEWS, only SessionCatalog supports this command.")
+      }
+
     case ShowTableProperties(r: ResolvedTable, propertyKey) if 
isSessionCatalog(r.catalog) =>
       ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
 
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
index 21874bd..1a84db1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/HiveResult.scala
@@ -23,7 +23,7 @@ import java.time.{Instant, LocalDate}
 
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.util.{DateFormatter, DateTimeUtils, 
TimestampFormatter}
-import org.apache.spark.sql.execution.command.{DescribeCommandBase, 
ExecutedCommandExec, ShowTablesCommand}
+import org.apache.spark.sql.execution.command.{DescribeCommandBase, 
ExecutedCommandExec, ShowTablesCommand, ShowViewsCommand}
 import org.apache.spark.sql.execution.datasources.v2.{DescribeTableExec, 
ShowTablesExec}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
@@ -50,6 +50,10 @@ object HiveResult {
     // namespace and table name.
     case command : ShowTablesExec =>
       command.executeCollect().map(_.getString(1))
+    // SHOW VIEWS in Hive only outputs view names while our v1 command outputs
+    // namespace, viewName, and isTemporary.
+    case command @ ExecutedCommandExec(_: ShowViewsCommand) =>
+      command.executeCollect().map(_.getString(1))
     case other =>
       val result: Seq[Seq[Any]] = 
other.executeCollectPublic().map(_.toSeq).toSeq
       // We need the types so we can output struct field names
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
index 795f900..23f1d6c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
@@ -23,11 +23,12 @@ import org.apache.spark.sql.{AnalysisException, Row, 
SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.analysis.{GlobalTempView, LocalTempView, 
PersistedView, UnresolvedFunction, UnresolvedRelation, ViewType}
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, 
CatalogTable, CatalogTableType, SessionCatalog}
-import org.apache.spark.sql.catalyst.expressions.{Alias, SubqueryExpression}
+import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, 
AttributeReference, SubqueryExpression}
 import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, View}
+import 
org.apache.spark.sql.connector.catalog.CatalogV2Implicits.NamespaceHelper
 import org.apache.spark.sql.internal.StaticSQLConf
-import org.apache.spark.sql.types.MetadataBuilder
+import org.apache.spark.sql.types.{BooleanType, MetadataBuilder, StringType}
 import org.apache.spark.sql.util.SchemaUtils
 
 /**
@@ -295,6 +296,40 @@ case class AlterViewAsCommand(
   }
 }
 
+/**
+ * A command for users to get views in the given database.
+ * If a databaseName is not given, the current database will be used.
+ * The syntax of using this command in SQL is:
+ * {{{
+ *   SHOW VIEWS [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ * }}}
+ */
+case class ShowViewsCommand(
+    databaseName: String,
+    tableIdentifierPattern: Option[String]) extends RunnableCommand {
+
+  // The result of SHOW VIEWS has three basic columns: namespace, viewName and 
isTemporary.
+  override val output: Seq[Attribute] = Seq(
+    AttributeReference("namespace", StringType, nullable = false)(),
+    AttributeReference("viewName", StringType, nullable = false)(),
+    AttributeReference("isTemporary", BooleanType, nullable = false)())
+
+  override def run(sparkSession: SparkSession): Seq[Row] = {
+    val catalog = sparkSession.sessionState.catalog
+
+    // Show the information of views.
+    val views = tableIdentifierPattern.map(catalog.listViews(databaseName, _))
+      .getOrElse(catalog.listViews(databaseName, "*"))
+    views.map { tableIdent =>
+      val namespace = tableIdent.database.toArray.quoted
+      val tableName = tableIdent.table
+      val isTemp = catalog.isTemporaryTable(tableIdent)
+
+      Row(namespace, tableName, isTemp)
+    }
+  }
+}
+
 object ViewHelper {
 
   import CatalogTable._
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql 
b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
index 3c77c99..8f46c93 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -15,6 +15,7 @@ SHOW TABLES IN showdb;
 SHOW TABLES 'show_t*';
 SHOW TABLES LIKE 'show_t1*|show_t2*';
 SHOW TABLES IN showdb 'show_t*';
+SHOW TABLES IN showdb LIKE 'show_t*';
 
 -- SHOW TABLE EXTENDED
 SHOW TABLE EXTENDED LIKE 'show_t*';
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-views.sql 
b/sql/core/src/test/resources/sql-tests/inputs/show-views.sql
new file mode 100644
index 0000000..bdf9ef4
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-views.sql
@@ -0,0 +1,28 @@
+-- Test data.
+CREATE DATABASE showdb;
+USE showdb;
+CREATE TABLE tbl(a STRING, b INT, c STRING, d STRING) USING parquet;
+CREATE VIEW view_1 AS SELECT * FROM tbl;
+CREATE VIEW view_2 AS SELECT * FROM tbl WHERE c='a';
+CREATE GLOBAL TEMP VIEW view_3 AS SELECT 1 as col1;
+CREATE TEMPORARY VIEW view_4(e INT) USING parquet;
+
+-- SHOW VIEWS
+SHOW VIEWS;
+SHOW VIEWS FROM showdb;
+SHOW VIEWS IN showdb;
+SHOW VIEWS IN global_temp;
+
+-- SHOW VIEWS WITH wildcard match
+SHOW VIEWS 'view_*';
+SHOW VIEWS LIKE 'view_1*|view_2*';
+SHOW VIEWS IN showdb 'view_*';
+SHOW VIEWS IN showdb LIKE 'view_*';
+-- Error when database not exists
+SHOW VIEWS IN wrongdb LIKE 'view_*';
+
+-- Clean Up
+DROP VIEW global_temp.view_3;
+DROP VIEW view_4;
+USE default;
+DROP DATABASE showdb CASCADE;
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out 
b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index 501e185..dd2e7b1 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 26
+-- Number of queries: 27
 
 
 -- !query
@@ -120,6 +120,16 @@ show_t3
 
 
 -- !query
+SHOW TABLES IN showdb LIKE 'show_t*'
+-- !query schema
+struct<database:string,tableName:string,isTemporary:boolean>
+-- !query output
+show_t1
+show_t2
+show_t3
+
+
+-- !query
 SHOW TABLE EXTENDED LIKE 'show_t*'
 -- !query schema
 struct<database:string,tableName:string,isTemporary:boolean,information:string>
diff --git a/sql/core/src/test/resources/sql-tests/results/show-views.sql.out 
b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
new file mode 100644
index 0000000..ee8a747
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/show-views.sql.out
@@ -0,0 +1,201 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 20
+
+
+-- !query
+CREATE DATABASE showdb
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+USE showdb
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TABLE tbl(a STRING, b INT, c STRING, d STRING) USING parquet
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE VIEW view_1 AS SELECT * FROM tbl
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE VIEW view_2 AS SELECT * FROM tbl WHERE c='a'
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE GLOBAL TEMP VIEW view_3 AS SELECT 1 as col1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE TEMPORARY VIEW view_4(e INT) USING parquet
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SHOW VIEWS
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+aggtest
+arraydata
+mapdata
+onek
+tenk1
+testdata
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS FROM showdb
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+aggtest
+arraydata
+mapdata
+onek
+tenk1
+testdata
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS IN showdb
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+aggtest
+arraydata
+mapdata
+onek
+tenk1
+testdata
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS IN global_temp
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+aggtest
+arraydata
+mapdata
+onek
+tenk1
+testdata
+view_3
+view_4
+
+
+-- !query
+SHOW VIEWS 'view_*'
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS LIKE 'view_1*|view_2*'
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+view_1
+view_2
+
+
+-- !query
+SHOW VIEWS IN showdb 'view_*'
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS IN showdb LIKE 'view_*'
+-- !query schema
+struct<namespace:string,viewName:string,isTemporary:boolean>
+-- !query output
+view_1
+view_2
+view_4
+
+
+-- !query
+SHOW VIEWS IN wrongdb LIKE 'view_*'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
+Database 'wrongdb' not found;
+
+
+-- !query
+DROP VIEW global_temp.view_3
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DROP VIEW view_4
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+USE default
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+DROP DATABASE showdb CASCADE
+-- !query schema
+struct<>
+-- !query output
+
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index ffaff66..3244684 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -909,6 +909,23 @@ class DataSourceV2SQLSuite
     assert(exception.getMessage.contains("The database name is not valid: 
a.b"))
   }
 
+  test("ShowViews: using v1 catalog, db name with multipartIdentifier ('a.b') 
is not allowed.") {
+    val exception = intercept[AnalysisException] {
+      sql("SHOW TABLES FROM a.b")
+    }
+
+    assert(exception.getMessage.contains("The database name is not valid: 
a.b"))
+  }
+
+  test("ShowViews: using v2 catalog, command not supported.") {
+    val exception = intercept[AnalysisException] {
+      sql("SHOW VIEWS FROM testcat")
+    }
+
+    assert(exception.getMessage.contains("Catalog testcat doesn't support SHOW 
VIEWS," +
+      " only SessionCatalog supports this command."))
+  }
+
   test("ShowTables: using v2 catalog with empty namespace") {
     spark.sql("CREATE TABLE testcat.table (id bigint, data string) USING foo")
     runShowTablesSql("SHOW TABLES FROM testcat", Seq(Row("", "table")))
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index be6d824..2faf420 100644
--- 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -861,6 +861,11 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, 
hadoopConf: Configurat
     client.listTables(db, pattern)
   }
 
+  override def listViews(db: String, pattern: String): Seq[String] = 
withClient {
+    requireDbExists(db)
+    client.listTablesByType(db, pattern, CatalogTableType.VIEW)
+  }
+
   override def loadTable(
       db: String,
       table: String,
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
index ba75dcf..d1dd136 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/VersionsSuite.scala
@@ -415,14 +415,20 @@ class VersionsSuite extends SparkFunSuite with Logging {
       try {
         client.dropTable("default", tableName = "temporary", ignoreIfNotExists 
= false,
           purge = true)
-        client.dropTable("default", tableName = "view1", ignoreIfNotExists = 
false,
-          purge = true)
         assert(!versionsWithoutPurge.contains(version))
       } catch {
         case _: UnsupportedOperationException =>
           assert(versionsWithoutPurge.contains(version))
           client.dropTable("default", tableName = "temporary", 
ignoreIfNotExists = false,
             purge = false)
+      }
+      // Drop table with type CatalogTableType.VIEW.
+      try {
+        client.dropTable("default", tableName = "view1", ignoreIfNotExists = 
false,
+          purge = true)
+        assert(!versionsWithoutPurge.contains(version))
+      } catch {
+        case _: UnsupportedOperationException =>
           client.dropTable("default", tableName = "view1", ignoreIfNotExists = 
false,
             purge = false)
       }
diff --git 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index dbbf2b2..bd85ad4 100644
--- 
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ 
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -117,6 +117,52 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils 
with TestHiveSingleto
     }
   }
 
+  test("show views") {
+    withView("show1a", "show2b", "global_temp.temp1", "temp2") {
+      sql("CREATE VIEW show1a AS SELECT 1 AS id")
+      sql("CREATE VIEW show2b AS SELECT 1 AS id")
+      sql("CREATE GLOBAL TEMP VIEW temp1 AS SELECT 1 AS id")
+      sql("CREATE TEMP VIEW temp2 AS SELECT 1 AS id")
+      checkAnswer(
+        sql("SHOW VIEWS"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) ::
+          Row("default", "parquet_view1", false) ::
+          Row("", "temp2", true) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS IN default"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) ::
+          Row("default", "parquet_view1", false) ::
+          Row("", "temp2", true) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS FROM default"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) ::
+          Row("default", "parquet_view1", false) ::
+          Row("", "temp2", true) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS FROM global_temp"),
+        Row("global_temp", "temp1", true) ::
+          Row("", "temp2", true) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS 'show1*|show2*'"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS LIKE 'show1*|show2*'"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS IN default 'show1*'"),
+        Row("default", "show1a", false) :: Nil)
+      checkAnswer(
+        sql("SHOW VIEWS IN default LIKE 'show1*|show2*'"),
+        Row("default", "show1a", false) ::
+          Row("default", "show2b", false) :: Nil)
+    }
+  }
+
   test("show tblproperties of data source tables - basic") {
     checkAnswer(
       sql("SHOW TBLPROPERTIES parquet_tab1").filter(s"key = 'my_key1'"),


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to