Repository: spark
Updated Branches:
  refs/heads/master f280ccf44 -> 5572ccf86
[SPARK-17932][SQL][FOLLOWUP] Change statement `SHOW TABLES EXTENDED` to `SHOW TABLE EXTENDED`

## What changes were proposed in this pull request?

Change the statement
`SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'] [PARTITION(partition_spec)]`
to the following two statements:
- SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']
- SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards' [PARTITION(partition_spec)]

After this change, the `SHOW TABLES` and `SHOW TABLE EXTENDED` statements have the same syntax as the corresponding Hive statements.

## How was this patch tested?

Modified the test SQL file `show-tables.sql` and the test suite `DDLSuite`.

Author: jiangxingbo <[email protected]>

Closes #16262 from jiangxb1987/show-table-extended.
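
To make the split concrete, here is a small spark-shell sketch (illustration only, not part of the patch; spark-shell provides the `spark` session, and the `default` database and `'show_t*'` pattern are arbitrary placeholders):

```scala
// Listing tables: EXTENDED and PARTITION are no longer accepted by SHOW TABLES.
spark.sql("SHOW TABLES IN default LIKE 'show_t*'").show()

// Per-table details move to the singular SHOW TABLE EXTENDED form, where the
// LIKE pattern is mandatory and a PARTITION clause is still rejected.
spark.sql("SHOW TABLE EXTENDED IN default LIKE 'show_t*'").show(truncate = false)
```
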
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5572ccf8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5572ccf8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5572ccf8

Branch: refs/heads/master
Commit: 5572ccf86b084eb5938fe62fd5d9973ec14d555d
Parents: f280ccf
Author: jiangxingbo <[email protected]>
Authored: Tue Dec 13 19:04:34 2016 +0100
Committer: Herman van Hovell <[email protected]>
Committed: Tue Dec 13 19:04:34 2016 +0100

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/parser/SqlBase.g4 |  6 +++--
 .../spark/sql/execution/SparkSqlParser.scala    | 26 ++++++++++++++------
 .../spark/sql/execution/command/tables.scala    |  7 +++---
 .../resources/sql-tests/inputs/show-tables.sql  |  8 +++---
 .../sql-tests/results/show-tables.sql.out       | 14 ++++++-----
 .../spark/sql/execution/command/DDLSuite.scala  |  6 ++---
 6 files changed, 41 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index 075c73d..63055b6 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -120,8 +120,10 @@ statement
         (USING resource (',' resource)*)?                              #createFunction
     | DROP TEMPORARY? FUNCTION (IF EXISTS)? qualifiedName              #dropFunction
     | EXPLAIN (LOGICAL | FORMATTED | EXTENDED | CODEGEN)? statement    #explain
-    | SHOW TABLES EXTENDED? ((FROM | IN) db=identifier)?
-        (LIKE? pattern=STRING)? partitionSpec?                         #showTables
+    | SHOW TABLES ((FROM | IN) db=identifier)?
+        (LIKE? pattern=STRING)?                                        #showTables
+    | SHOW TABLE EXTENDED ((FROM | IN) db=identifier)?
+        LIKE pattern=STRING partitionSpec?                             #showTable
     | SHOW DATABASES (LIKE pattern=STRING)?                            #showDatabases
     | SHOW TBLPROPERTIES table=tableIdentifier
         ('(' key=tablePropertyKey ')')?                                #showTblProperties

http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 4400174..cab1b22 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -126,23 +126,33 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
    * Create a [[ShowTablesCommand]] logical plan.
    * Example SQL :
    * {{{
-   *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards']
-   *   [PARTITION(partition_spec)];
+   *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
    * }}}
    */
   override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
+    ShowTablesCommand(
+      Option(ctx.db).map(_.getText),
+      Option(ctx.pattern).map(string),
+      isExtended = false)
+  }
+
+  /**
+   * Create a [[ShowTablesCommand]] logical plan.
+   * Example SQL :
+   * {{{
+   *   SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards'
+   *   [PARTITION(partition_spec)];
+   * }}}
+   */
+  override def visitShowTable(ctx: ShowTableContext): LogicalPlan = withOrigin(ctx) {
     if (ctx.partitionSpec != null) {
-      operationNotAllowed("SHOW TABLES [EXTENDED] ... PARTITION", ctx)
-    }
-    if (ctx.EXTENDED != null && ctx.pattern == null) {
-      throw new AnalysisException(
-        s"SHOW TABLES EXTENDED must have identifier_with_wildcards specified.")
+      operationNotAllowed("SHOW TABLE EXTENDED ... PARTITION", ctx)
     }
     ShowTablesCommand(
       Option(ctx.db).map(_.getText),
       Option(ctx.pattern).map(string),
-      ctx.EXTENDED != null)
+      isExtended = true)
   }
 
   /**

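For illustration only (this snippet is not in the patch): after the parser change above, both statements build the same `ShowTablesCommand`, differing only in the `isExtended` flag; the database name and pattern below are made-up values, e.g. pasted into spark-shell:

```scala
import org.apache.spark.sql.execution.command.ShowTablesCommand

// Roughly what visitShowTables builds for: SHOW TABLES IN showdb LIKE 'show_t*'
val plainListing = ShowTablesCommand(Some("showdb"), Some("show_t*"), isExtended = false)

// Roughly what visitShowTable builds for: SHOW TABLE EXTENDED IN showdb LIKE 'show_t*'
// (a PARTITION clause is still rejected with "Operation not allowed").
val extendedListing = ShowTablesCommand(Some("showdb"), Some("show_t*"), isExtended = true)
```
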
http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index d2a7556..012b6ea 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -592,7 +592,8 @@ case class DescribeTableCommand(
  * If a databaseName is not given, the current database will be used.
  * The syntax of using this command in SQL is:
  * {{{
- *   SHOW TABLES [EXTENDED] [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ *   SHOW TABLES [(IN|FROM) database_name] [[LIKE] 'identifier_with_wildcards'];
+ *   SHOW TABLE EXTENDED [(IN|FROM) database_name] LIKE 'identifier_with_wildcards';
  * }}}
  */
 case class ShowTablesCommand(
@@ -600,8 +601,8 @@ case class ShowTablesCommand(
     tableIdentifierPattern: Option[String],
     isExtended: Boolean = false) extends RunnableCommand {
 
-  // The result of SHOW TABLES has three basic columns: database, tableName and isTemporary.
-  // If `isExtended` is true, append column `information` to the output columns.
+  // The result of SHOW TABLES/SHOW TABLE has three basic columns: database, tableName and
+  // isTemporary. If `isExtended` is true, append column `information` to the output columns.
   override val output: Seq[Attribute] = {
     val tableExtendedInfo = if (isExtended) {
       AttributeReference("information", StringType, nullable = false)() :: Nil

http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
index a16c398..18d02e1 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/show-tables.sql
@@ -16,11 +16,11 @@ SHOW TABLES 'show_t*';
 SHOW TABLES LIKE 'show_t1*|show_t2*';
 SHOW TABLES IN showdb 'show_t*';
 
--- SHOW TABLES EXTENDED
+-- SHOW TABLE EXTENDED
 -- Ignore these because there exist timestamp results, e.g. `Created`.
--- SHOW TABLES EXTENDED LIKE 'show_t*';
-SHOW TABLES EXTENDED;
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us');
+-- SHOW TABLE EXTENDED LIKE 'show_t*';
+SHOW TABLE EXTENDED;
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us');
 
 -- Clean Up
 DROP TABLE show_t1;

http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
----------------------------------------------------------------------
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index a4f4112..904601b 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -114,28 +114,30 @@ show_t3
 
 
 -- !query 12
-SHOW TABLES EXTENDED
+SHOW TABLE EXTENDED
 -- !query 12 schema
 struct<>
 -- !query 12 output
 org.apache.spark.sql.catalyst.parser.ParseException
-SHOW TABLES EXTENDED must have identifier_with_wildcards specified.
+mismatched input '<EOF>' expecting 'LIKE'(line 1, pos 19)
+
 == SQL ==
-SHOW TABLES EXTENDED
+SHOW TABLE EXTENDED
+-------------------^^^
 
 
 -- !query 13
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 -- !query 13 schema
 struct<>
 -- !query 13 output
 org.apache.spark.sql.catalyst.parser.ParseException
-Operation not allowed: SHOW TABLES [EXTENDED] ... PARTITION(line 1, pos 0)
+Operation not allowed: SHOW TABLE EXTENDED ... PARTITION(line 1, pos 0)
 
 == SQL ==
-SHOW TABLES EXTENDED LIKE 'show_t1' PARTITION(c='Us')
+SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(c='Us')
 ^^^

http://git-wip-us.apache.org/repos/asf/spark/blob/5572ccf8/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e61beb4..4c0e0fa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -885,7 +885,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     testRenamePartitions(isDatasourceTable = true)
   }
 
-  test("show tables") {
+  test("show table extended") {
     withTempView("show1a", "show2b") {
       sql(
         """
@@ -909,9 +909,9 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
           |)
         """.stripMargin)
       assert(
-        sql("SHOW TABLES EXTENDED LIKE 'show*'").count() >= 2)
+        sql("SHOW TABLE EXTENDED LIKE 'show*'").count() >= 2)
       assert(
-        sql("SHOW TABLES EXTENDED LIKE 'show*'").schema ==
+        sql("SHOW TABLE EXTENDED LIKE 'show*'").schema ==
           StructType(StructField("database", StringType, false) ::
             StructField("tableName", StringType, false) ::
             StructField("isTemporary", BooleanType, false) ::
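
Finally, a usage sketch mirroring the DDLSuite assertion above (not part of the patch; assumes spark-shell, which provides `spark`, on a build that includes this commit):

```scala
import org.apache.spark.sql.types.{BooleanType, StringType, StructField, StructType}

// SHOW TABLE EXTENDED keeps the three basic SHOW TABLES columns and, because
// isExtended is true, appends the non-nullable `information` column introduced
// by the AttributeReference in tables.scala above.
val extended = spark.sql("SHOW TABLE EXTENDED LIKE 'show*'")
assert(extended.schema ==
  StructType(StructField("database", StringType, false) ::
    StructField("tableName", StringType, false) ::
    StructField("isTemporary", BooleanType, false) ::
    StructField("information", StringType, false) :: Nil))
```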
