imback82 commented on a change in pull request #27187: [SPARK-30497][SQL] migrate DESCRIBE TABLE to the new framework
URL: https://github.com/apache/spark/pull/27187#discussion_r366675995
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
##########
@@ -214,23 +214,12 @@ class ResolveSessionCatalog(
     case RenameTableStatement(SessionCatalogAndTable(_, oldName), newNameParts, isView) =>
       AlterTableRenameCommand(oldName.asTableIdentifier, newNameParts.asTableIdentifier, isView)
 
-    case DescribeTableStatement(
-        nameParts @ SessionCatalogAndTable(catalog, tbl), partitionSpec, isExtended) =>
-      loadTable(catalog, tbl.asIdentifier).collect {
-        case v1Table: V1Table =>
-          DescribeTableCommand(tbl.asTableIdentifier, partitionSpec, isExtended)
-      }.getOrElse {
-        // The v1 `DescribeTableCommand` can describe view as well.
-        if (isView(tbl)) {
-          DescribeTableCommand(tbl.asTableIdentifier, partitionSpec, isExtended)
-        } else {
-          if (partitionSpec.nonEmpty) {
-            throw new AnalysisException("DESCRIBE TABLE does not support partition for v2 tables.")
-          }
-          val r = UnresolvedV2Relation(nameParts, catalog.asTableCatalog, tbl.asIdentifier)
-          DescribeTable(r, isExtended)
-        }
-      }
+    case DescribeRelation(ResolvedTable(_, ident, _: V1Table), partitionSpec, isExtended) =>
+      DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended)
+
+    // Use v1 command to describe temp view, as v2 catalog doesn't support view yet.

Review comment:
   `describe (temp) view`
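For readers without the full diff (the hunk above is truncated at the commented line, as GitHub review diffs are), here is a minimal, self-contained Scala sketch of the dispatch the new framework enables. Everything in it is a simplified stand-in for illustration: the real rule pattern-matches analyzer-resolved `LogicalPlan` nodes such as `ResolvedTable`, and the `ResolvedTempView`/`ResolvedV2Table` names below are assumptions, not Spark's API.

```scala
// Stand-in types, not Spark classes: they model the nodes the analyzer
// produces after resolving the relation named in DESCRIBE TABLE.
sealed trait ResolvedRelation
case class ResolvedV1Table(name: String) extends ResolvedRelation
case class ResolvedV2Table(name: String) extends ResolvedRelation
case class ResolvedTempView(name: String) extends ResolvedRelation

case class DescribeRelation(
    relation: ResolvedRelation,
    partitionSpec: Map[String, String],
    isExtended: Boolean)

// Before this PR, the rule called loadTable() itself to discover what kind
// of relation it was describing (see the deleted branch above). After the
// migration, resolution happens in the analyzer, so the rule reduces to a
// flat pattern match over already-resolved nodes:
def toCommand(d: DescribeRelation): String = d.relation match {
  // v1 tables and (temp) views fall back to the v1 DescribeTableCommand,
  // since the v2 catalog API has no view support yet.
  case ResolvedV1Table(n)  => s"DescribeTableCommand($n, extended=${d.isExtended})"
  case ResolvedTempView(n) => s"DescribeTableCommand($n, extended=${d.isExtended})"
  case ResolvedV2Table(n)  =>
    require(d.partitionSpec.isEmpty,
      "DESCRIBE TABLE does not support partition for v2 tables.")
    s"DescribeTable($n, extended=${d.isExtended})"
}

// Example: a v1 table goes down the v1 command path.
println(toCommand(DescribeRelation(ResolvedV1Table("db.t1"), Map.empty, isExtended = true)))
// DescribeTableCommand(db.t1, extended=true)
```

The design payoff suggested by the deleted branch is that table/view lookup moves out of individual rules and into the analyzer's resolution framework, so `ResolveSessionCatalog` no longer needs its own `loadTable` call and unresolved relations fail uniformly during analysis.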