This is an automated email from the ASF dual-hosted git repository.
akashrn5 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git
The following commit(s) were added to refs/heads/master by this push:
new 085f014 [CARBONDATA-3996]Fixed show table extended like command exception
085f014 is described below
commit 085f0141feb5825519996f782da46080e2384535
Author: Venu Reddy <[email protected]>
AuthorDate: Fri Sep 18 21:20:35 2020 +0530
[CARBONDATA-3996]Fixed show table extended like command exception
Why is this PR needed?
Show table extended like command throws
java.lang.ArrayIndexOutOfBoundsException.
Spark uses the same ShowTablesCommand class for both the show tables command
and the show table extended like command, but their outputs have 3 and 4
attributes respectively. CarbonShowTablesCommand only handled show tables
with 3 output attributes, hence the exception when show table extended like
is executed.
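
As a minimal illustration of the failure mode (illustrative only, not
CarbonData code; the column names follow the schemas asserted in the new
test below):

    import org.apache.spark.sql.Row

    // SHOW TABLES rows carry 3 columns: database, tableName, isTemporary
    val row = Row("default", "employee", false)
    // SHOW TABLE EXTENDED LIKE declares a 4th output column: information.
    // Reading a column the 3-column handler never produced is what raises
    // java.lang.ArrayIndexOutOfBoundsException.
    row.get(3)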
What changes were proposed in this PR?
Instead of querying the session catalog and forming the output rows in
carbondata, call showTablesCommand.run(sparkSession) to get the output rows
for the tables and filter out the rows corresponding to MV tables.
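
In outline, the new flow is delegate-then-filter (a condensed sketch of the
patched command in the diff below; isMainTable stands for the existing
isVisible check):

    import org.apache.spark.sql.{Row, SparkSession}
    import org.apache.spark.sql.execution.command.ShowTablesCommand

    def showMainTables(
        showTablesCommand: ShowTablesCommand,
        sparkSession: SparkSession,
        isMainTable: (String, String) => Boolean): Seq[Row] = {
      // let spark build the 3- or 4-column rows, whichever the variant needs
      val rows = showTablesCommand.run(sparkSession)
      // columns 0 and 1 are database and tableName in both variants
      rows.filter(row => isMainTable(row.get(0).toString, row.get(1).toString))
    }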
Does this PR introduce any user interface change?
No
Is any new testcase added?
Yes
This closes #3938
---
.../command/table/CarbonShowTablesCommand.scala | 35 +++------
.../spark/sql/execution/strategy/DDLHelper.scala | 6 --
.../spark/sql/execution/strategy/DDLStrategy.scala | 4 +-
.../spark/sql/hive/SqlAstBuilderHelper.scala | 8 --
.../spark/testsuite/ShowTable/TestShowTable.scala | 88 ++++++++++++++++++++++
5 files changed, 99 insertions(+), 42 deletions(-)
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
index 26e0b50..496a66d 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
@@ -18,35 +18,23 @@
package org.apache.spark.sql.execution.command.table
import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
-import org.apache.spark.sql.execution.command.MetadataCommand
-import org.apache.spark.sql.types.{BooleanType, StringType}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.execution.command.{MetadataCommand, ShowTablesCommand}
-private[sql] case class CarbonShowTablesCommand ( databaseName: Option[String],
- tableIdentifierPattern: Option[String]) extends MetadataCommand{
+private[sql] case class CarbonShowTablesCommand(showTablesCommand: ShowTablesCommand)
+ extends MetadataCommand {
- // The result of SHOW TABLES has three columns: database, tableName and isTemporary.
- override val output: Seq[Attribute] = {
- AttributeReference("database", StringType, nullable = false)() ::
- AttributeReference("tableName", StringType, nullable = false)() ::
- AttributeReference("isTemporary", BooleanType, nullable = false)() :: Nil
- }
+ override val output: Seq[Attribute] = showTablesCommand.output
override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
- // Since we need to return a Seq of rows, we will call getTables directly
- // instead of calling tables in sparkSession.
- val catalog = sparkSession.sessionState.catalog
- val db = databaseName.getOrElse(catalog.getCurrentDatabase)
- val tables =
- tableIdentifierPattern.map(catalog.listTables(db, _)).getOrElse(catalog.listTables(db))
+ val rows = showTablesCommand.run(sparkSession)
val externalCatalog = sparkSession.sharedState.externalCatalog
// this method checks whether the table is mainTable or MV based on property "isVisible"
- def isMainTable(tableIdent: TableIdentifier) = {
+ def isMainTable(db: String, table: String) = {
var isMainTable = true
try {
- isMainTable = externalCatalog.getTable(db, tableIdent.table).storage.properties
+ isMainTable = externalCatalog.getTable(db, table).storage.properties
.getOrElse("isVisible", true).toString.toBoolean
} catch {
case ex: Throwable =>
@@ -55,12 +43,7 @@ private[sql] case class CarbonShowTablesCommand ( databaseName: Option[String],
isMainTable
}
// tables will be filtered for all the MVs to show only main tables
- tables.collect {
- case tableIdent if isMainTable(tableIdent) =>
- val isTemp = catalog.isTemporaryTable(tableIdent)
- Row(tableIdent.database.getOrElse("default"), tableIdent.table, isTemp)
- }
-
+ rows.filter(row => isMainTable(row.get(0).toString, row.get(1).toString))
}
override protected def opName: String = "SHOW TABLES"
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
index 35f799c..cf00db5 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLHelper.scala
@@ -397,12 +397,6 @@ object DDLHelper {
}
}
- def showTables(showTablesCommand: ShowTablesCommand): Seq[SparkPlan] = {
- ExecutedCommandExec(CarbonShowTablesCommand(
- showTablesCommand.databaseName,
- showTablesCommand.tableIdentifierPattern)) :: Nil
- }
-
//////////////////////////////////////////////////////////////////////////////////
// carbon file
/////////////////////////////////////////////////////////////////////////////////
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
index ac7a574..9a0e12c 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/strategy/DDLStrategy.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.execution.command.index.{DropIndexCommand, ShowIndex
import org.apache.spark.sql.execution.command.management.{CarbonAlterTableCompactionCommand, CarbonInsertIntoCommand}
import org.apache.spark.sql.execution.command.mutation.CarbonTruncateCommand
import org.apache.spark.sql.execution.command.schema._
-import org.apache.spark.sql.execution.command.table.{CarbonCreateTableLikeCommand, CarbonDropTableCommand, CarbonShowCreateTableCommand}
+import org.apache.spark.sql.execution.command.table.{CarbonCreateTableLikeCommand, CarbonDropTableCommand, CarbonShowCreateTableCommand, CarbonShowTablesCommand}
import org.apache.spark.sql.execution.datasources.{InsertIntoHadoopFsRelationCommand, RefreshResource, RefreshTable}
import org.apache.spark.sql.hive.execution.CreateHiveTableAsSelectCommand
import org.apache.spark.sql.hive.execution.command.{CarbonDropDatabaseCommand, CarbonResetCommand, CarbonSetCommand, MatchResetCommand}
@@ -194,7 +194,7 @@ class DDLStrategy(sparkSession: SparkSession) extends SparkStrategy {
case explain: ExplainCommand =>
DDLHelper.explain(explain, sparkSession)
case showTables: ShowTablesCommand =>
- DDLHelper.showTables(showTables)
+ ExecutedCommandExec(CarbonShowTablesCommand(showTables)) :: Nil
case CarbonCreateSecondaryIndexCommand(
indexModel, tableProperties, ifNotExists, isDeferredRefresh, isCreateSIndex) =>
val isCarbonTable = CarbonEnv.getInstance(sparkSession).carbonMetaStore
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala b/integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
index dae6907..2f897b9 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/SqlAstBuilderHelper.scala
@@ -84,14 +84,6 @@ trait SqlAstBuilderHelper extends SparkSqlAstBuilder {
super.visitCreateTable(ctx)
}
- override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = {
- withOrigin(ctx) {
- CarbonShowTablesCommand(
- Option(ctx.db).map(_.getText),
- Option(ctx.pattern).map(string))
- }
- }
-
override def visitExplain(ctx: SqlBaseParser.ExplainContext): LogicalPlan = {
CarbonExplainCommand(super.visitExplain(ctx))
}
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/ShowTable/TestShowTable.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/ShowTable/TestShowTable.scala
new file mode 100644
index 0000000..7af1c9e
--- /dev/null
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/ShowTable/TestShowTable.scala
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.spark.testsuite.ShowTable
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
+
+import org.apache.carbondata.common.exceptions.sql.MalformedIndexCommandException
+
+/**
+ * Test class for show tables.
+ */
+class TestShowTable extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach {
+ val dbName = "testshowtable"
+ override def beforeAll: Unit = {
+ sql("use default")
+ sql(s"drop database if exists $dbName cascade")
+ sql(s"create database $dbName")
+ sql(s"use $dbName")
+ }
+
+ test("test show tables") {
+ sql("create table employee(id string, name string) stored as carbondata")
+ sql("create table employee_part(name string) partitioned by (grade int)")
+ sql("create index employee_si on table employee(name) as 'carbondata'")
+ sql("create materialized view employee_mv as select name from employee
group by name")
+ val rows = sql("show tables").collect()
+ val schema = rows(0).schema
+ assert(schema.length == 3)
+ assert(schema(0).name.equals("database"))
+ assert(schema(1).name.equals("tableName"))
+ assert(schema(2).name.equals("isTemporary"))
+ // show tables query can return views as well. Just validate if expected rows are present and
+ // mv row is not present
+ val expectedRows = Seq(Row(dbName, "employee", false),
+ Row(dbName, "employee_si", false),
+ Row(dbName, "employee_part", false))
+ assert(rows.intersect(expectedRows).length == 3)
+ assert(rows.intersect(Seq(Row(dbName, "employee_mv", false))).length == 0)
+ }
+
+ test("test show table extended like") {
+ sql("create table employee(id string, name string) stored as carbondata")
+ sql("create table employee_part(name string) partitioned by (grade int)")
+ sql("create index employee_si on table employee(name) as 'carbondata'")
+ sql("create materialized view employee_mv as select name from employee
group by name")
+ val rows = sql("show table extended like 'emp*'").collect()
+ assert(rows.length == 3)
+ val schema = rows(0).schema
+ assert(schema.length == 4)
+ assert(schema(0).name.equals("database"))
+ assert(schema(1).name.equals("tableName"))
+ assert(schema(2).name.equals("isTemporary"))
+ assert(schema(3).name.equals("information"))
+ }
+
+ override def afterEach(): Unit = {
+ drop
+ }
+ override def afterAll(): Unit = {
+ sql("use default")
+ sql(s"drop database if exists $dbName cascade")
+ }
+
+ def drop(): Unit = {
+ scala.util.control.Exception.ignoring(classOf[MalformedIndexCommandException]) {
+ sql("drop index employee_si on employee")
+ }
+ sql("drop materialized view if exists employee_mv1")
+ sql("drop table if exists employee")
+ sql("drop table if exists employee_part")
+ }
+}