dilipbiswal commented on a change in pull request #24938: [SPARK-27946][SQL]
Hive DDL to Spark DDL conversion USING "show create table"
URL: https://github.com/apache/spark/pull/24938#discussion_r310297582
##########
File path:
sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
##########
@@ -1057,83 +1143,86 @@ case class ShowCreateTableCommand(table:
TableIdentifier) extends RunnableComman
}
}
}
+}
- private def showTableLocation(metadata: CatalogTable, builder:
StringBuilder): Unit = {
- if (metadata.tableType == EXTERNAL) {
- metadata.storage.locationUri.foreach { location =>
- builder ++= s"LOCATION
'${escapeSingleQuotedString(CatalogUtils.URIToString(location))}'\n"
- }
- }
- }
+/**
+ * This command generates Spark DDL for a Hive table.
+ *
+ * The syntax of using this command in SQL is:
+ * {{{
+ * SHOW CREATE TABLE table_identifier AS SPARK;
+ * }}}
+ */
+case class ShowCreateTableAsSparkCommand(table: TableIdentifier)
+ extends RunnableCommand with ShowCreateTableCommandBase {
+ override val output: Seq[Attribute] = Seq(
+ AttributeReference("sparktab_stmt", StringType, nullable = false)()
+ )
- private def showTableComment(metadata: CatalogTable, builder:
StringBuilder): Unit = {
- metadata
- .comment
- .map("COMMENT '" + escapeSingleQuotedString(_) + "'\n")
- .foreach(builder.append)
- }
+ override def run(sparkSession: SparkSession): Seq[Row] = {
+ val catalog = sparkSession.sessionState.catalog
+ val tableMetadata = catalog.getTableMetadata(table)
- private def showTableProperties(metadata: CatalogTable, builder:
StringBuilder): Unit = {
- if (metadata.properties.nonEmpty) {
- val props = metadata.properties.map { case (key, value) =>
- s"'${escapeSingleQuotedString(key)}' =
'${escapeSingleQuotedString(value)}'"
+ val stmt = if (DDLUtils.isDatasourceTable(tableMetadata)) {
+ throw new AnalysisException(
+ s"$table is already a Spark data source table. Use `SHOW CREATE TABLE`
instead.")
+ } else {
+ if (tableMetadata.unsupportedFeatures.nonEmpty) {
+ throw new AnalysisException(
+ "Failed to execute SHOW CREATE TABLE AS SPARK against table " +
+ s"${tableMetadata.identifier}, which is created by Hive and uses
the " +
+ "following unsupported feature(s)\n" +
+ tableMetadata.unsupportedFeatures.map(" - " + _).mkString("\n")
+ )
}
- builder ++= props.mkString("TBLPROPERTIES (\n ", ",\n ", "\n)\n")
- }
- }
-
- private def showCreateDataSourceTable(metadata: CatalogTable): String = {
- val builder = StringBuilder.newBuilder
-
- builder ++= s"CREATE TABLE ${table.quotedString} "
- showDataSourceTableDataColumns(metadata, builder)
- showDataSourceTableOptions(metadata, builder)
- showDataSourceTableNonDataColumns(metadata, builder)
- showTableComment(metadata, builder)
- showTableLocation(metadata, builder)
- showTableProperties(metadata, builder)
-
- builder.toString()
- }
-
- private def showDataSourceTableDataColumns(
- metadata: CatalogTable, builder: StringBuilder): Unit = {
- val columns = metadata.schema.fields.map(_.toDDL)
- builder ++= columns.mkString("(", ", ", ")\n")
- }
+ if (tableMetadata.tableType == VIEW) {
+ throw new AnalysisException("Hive view isn't supported by SHOW CREATE
TABLE AS SPARK")
+ }
- private def showDataSourceTableOptions(metadata: CatalogTable, builder:
StringBuilder): Unit = {
- builder ++= s"USING ${metadata.provider.get}\n"
+ // scalastyle:off caselocale
+ if (tableMetadata.properties.getOrElse("transactional",
"false").toLowerCase.equals("true")) {
Review comment:
IMHO, it's a good idea. I am not sure what happens today when we try to
select from a Hive transactional table. If we add it to the unsupported
features, will we then get an error during select?
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]