cloud-fan commented on code in PR #44377:
URL: https://github.com/apache/spark/pull/44377#discussion_r1429852693
##########
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala:
##########
@@ -83,39 +83,42 @@ class V2SessionCatalog(catalog: SessionCatalog)
override def loadTable(ident: Identifier): Table = {
try {
- val table = catalog.getTableMetadata(ident.asTableIdentifier)
- if (table.provider.isDefined) {
- DataSourceV2Utils.getTableProvider(table.provider.get, conf) match {
- case Some(provider) =>
- // Get the table properties during creation and append the path
option
- // to the properties.
- val dsOptions = getDataSourceOptions(table.properties,
table.storage)
- // If the source accepts external table metadata, we can pass the
schema and
- // partitioning information stored in Hive to `getTable` to avoid
expensive
- // schema/partitioning inference.
- if (provider.supportsExternalMetadata()) {
- provider.getTable(
- table.schema,
- getV2Partitioning(table),
- dsOptions.asCaseSensitiveMap())
- } else {
- provider.getTable(
- provider.inferSchema(dsOptions),
- provider.inferPartitioning(dsOptions),
- dsOptions.asCaseSensitiveMap())
- }
- case _ =>
- V1Table(table)
- }
- } else {
- V1Table(table)
- }
+ toV2Table(catalog.getTableMetadata(ident.asTableIdentifier))
} catch {
case _: NoSuchDatabaseException =>
throw QueryCompilationErrors.noSuchTableError(ident)
}
}
+ private def toV2Table(v1Table: CatalogTable): Table = {
+ if (v1Table.provider.isDefined) {
Review Comment:
no change, just code moved around.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]