dohongdayi commented on a change in pull request #34127:
URL: https://github.com/apache/spark/pull/34127#discussion_r722263212
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -80,8 +80,8 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)
- case UseStatement(isNamespaceSet, nameParts) =>
- if (isNamespaceSet) {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
One way to resolve `UnresolvedDBObjectName` in a single place is:
```scala
case UnresolvedDBObjectName(CatalogAndNamespace(catalog, name), _) =>
  ResolvedDBObjectName(catalog, name)
case UnresolvedDBObjectName(CatalogAndIdentifier(catalog, identifier), _) =>
  ResolvedDBObjectName(catalog, identifier.namespace :+ identifier.name())
case Use(ResolvedDBObjectName(catalog, ns)) =>
  val namespace = if (ns.nonEmpty) Some(ns) else None
  SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
```
But as I said, it will CHANGE the behavior of `USE NAMESPACE ...` to be in line with `CREATE NAMESPACE ...`, which will be NO different from `USE ...`.
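For context, the `UseStatement` branch being replaced here (only partially visible in the diff above) looks roughly like the following; this is a reconstruction of the pre-PR `ResolveCatalogs`, not copied verbatim:
```scala
// Approximate pre-PR handling: USE NAMESPACE keeps the current catalog and treats
// the whole multi-part name as a namespace, while plain USE lets the leading
// name part select a catalog.
case UseStatement(isNamespaceSet, nameParts) =>
  if (isNamespaceSet) {
    // USE NAMESPACE a.b => catalog unchanged, namespace = Seq("a", "b")
    SetCatalogAndNamespace(catalogManager, None, Some(nameParts))
  } else {
    // USE a.b => "a" may resolve to a registered catalog, "b" becomes the namespace
    val CatalogAndNamespace(catalog, ns) = nameParts
    val namespace = if (ns.nonEmpty) Some(ns) else None
    SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
  }
```
Routing both paths through `CatalogAndNamespace` is what collapses these two behaviors into one.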
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -80,8 +80,8 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)
- case UseStatement(isNamespaceSet, nameParts) =>
- if (isNamespaceSet) {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
One way to resolve `UnresolvedDBObjectName` in a single place is:
```scala
case UnresolvedDBObjectName(CatalogAndNamespace(catalog, name), isNamespace) if isNamespace =>
  ResolvedDBObjectName(catalog, name)
case UnresolvedDBObjectName(CatalogAndIdentifier(catalog, identifier), _) =>
  ResolvedDBObjectName(catalog, identifier.namespace :+ identifier.name())
case Use(ResolvedDBObjectName(catalog, ns)) =>
  val namespace = if (ns.nonEmpty) Some(ns) else None
  SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
```
But as I said, it will CHANGE the behavior of `USE NAMESPACE ...` to be in line with `CREATE NAMESPACE ...`, which will be NO different from `USE ...`.
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -80,8 +80,8 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)
- case UseStatement(isNamespaceSet, nameParts) =>
- if (isNamespaceSet) {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
One way to resolve `UnresolvedDBObjectName` in a single place is:
```scala
case UnresolvedDBObjectName(CatalogAndNamespace(catalog, name), _) =>
  ResolvedDBObjectName(catalog, name)
case Use(ResolvedDBObjectName(catalog, ns)) =>
  val namespace = if (ns.nonEmpty) Some(ns) else None
  SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
```
But as I said, it will CHANGE the behavior of `USE NAMESPACE ...` to be in line with `CREATE NAMESPACE ...`, which will be NO different from `USE ...`.
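To make the behavior change concrete, here is a small illustration, assuming a v2 catalog registered under the hypothetical name `testcat` (e.g. via `spark.sql.catalog.testcat`); it is not taken from the PR or its tests:
```scala
import org.apache.spark.sql.SparkSession

// Assumes spark.sql.catalog.testcat points at some CatalogPlugin implementation.
val spark = SparkSession.builder().master("local[1]").getOrCreate()

spark.sql("USE NAMESPACE testcat.ns")
// Current behavior: the catalog stays unchanged and the current namespace
// becomes ["testcat", "ns"].
// With the unified CatalogAndNamespace resolution sketched above: "testcat"
// would be picked up as the catalog and the namespace would become ["ns"],
// i.e. the same outcome as `USE testcat.ns`.
```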
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -80,8 +80,8 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
writeOptions = c.writeOptions,
orCreate = c.orCreate)
- case UseStatement(isNamespaceSet, nameParts) =>
- if (isNamespaceSet) {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
@cloud-fan, I have given it one more try, please take a look.
Thanks
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -31,6 +31,15 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
import org.apache.spark.sql.connector.catalog.CatalogV2Util._
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
I didn't see any other way to resolve `UnresolvedDBObjectName` separately without failing the [`USE NAMESPACE` test case](https://github.com/apache/spark/blob/aa393cdafff5c9f20d41133631a4efcdee3ccdc7/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala#L1356)
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -31,6 +31,15 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
import org.apache.spark.sql.connector.catalog.CatalogV2Util._
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
I didn't see any way to resolve `UnresolvedDBObjectName` separately without failing the [`USE NAMESPACE` test case](https://github.com/apache/spark/blob/aa393cdafff5c9f20d41133631a4efcdee3ccdc7/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala#L1354-L1356)
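A sketch of the alternative that keeps both behaviors, branching on the `isNamespace` flag inside the `Use` match of `ResolveCatalogs` instead of resolving the name in a shared rule (illustrative only, not the PR's final code):
```scala
// USE NAMESPACE ...: keep the current catalog, treat the whole name as a namespace.
case Use(UnresolvedDBObjectName(nameParts, isNamespace)) if isNamespace =>
  SetCatalogAndNamespace(catalogManager, None, Some(nameParts))

// USE ...: the leading name part may select a registered catalog.
case Use(UnresolvedDBObjectName(CatalogAndNamespace(catalog, ns), _)) =>
  val namespace = if (ns.nonEmpty) Some(ns) else None
  SetCatalogAndNamespace(catalogManager, Some(catalog.name()), namespace)
```
Consulting the flag inside the `Use` rule itself appears to be what keeps the linked `USE NAMESPACE` test passing.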
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
##########
@@ -31,6 +31,15 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
import org.apache.spark.sql.connector.catalog.CatalogV2Util._
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
+ case Use(UnresolvedDBObjectName(nameParts, isNamespace)) =>
Review comment:
@cloud-fan, could you please advise?
Thanks
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]