cloud-fan commented on code in PR #53630:
URL: https://github.com/apache/spark/pull/53630#discussion_r2922904061
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/RelationResolution.scala:
##########
@@ -89,7 +91,54 @@ class RelationResolution(
return None
}
- v1SessionCatalog.getRawLocalOrGlobalTempView(identifier)
+ val lookupIdentifier = if (isSessionQualifiedViewName(identifier)) {
+ normalizeSessionQualifiedViewIdentifier(identifier)
+ } else {
+ identifier
+ }
+ v1SessionCatalog.getRawLocalOrGlobalTempView(lookupIdentifier)
+ }
+
+ /**
+ * True if the identifier is explicitly qualified as a session (local temp)
view:
+ * session.viewName (2-part) or system.session.viewName (3-part).
+ */
+ private def isSessionQualifiedViewName(nameParts: Seq[String]): Boolean = {
+ nameParts.length == 2 &&
nameParts.head.equalsIgnoreCase(CatalogManager.SESSION_NAMESPACE) ||
+ nameParts.length == 3 &&
Review Comment:
Missing parentheses around the `||` branches make the precedence
non-obvious. While correct (since `&&` binds tighter than `||`), parentheses
would make the intent immediately clear:
```suggestion
(nameParts.length == 2 &&
nameParts.head.equalsIgnoreCase(CatalogManager.SESSION_NAMESPACE)) ||
```
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/resolver/Resolver.scala:
##########
@@ -506,22 +513,49 @@ class Resolver(
*/
private def resolveRelation(unresolvedRelation: UnresolvedRelation):
LogicalPlan = {
viewResolver.withSourceUnresolvedRelation(unresolvedRelation) {
- val maybeResolvedRelation =
-
relationMetadataProvider.getRelationWithResolvedMetadata(unresolvedRelation)
-
- val resolvedRelation = maybeResolvedRelation match {
- case Some(relationsWithResolvedMetadata) =>
- planLogger.logPlanResolutionEvent(
- relationsWithResolvedMetadata,
- "Relation metadata retrieved"
- )
-
- relationsWithResolvedMetadata
- case None =>
-
unresolvedRelation.tableNotFound(unresolvedRelation.multipartIdentifier)
- }
+ try {
+ val maybeResolvedRelation =
+
relationMetadataProvider.getRelationWithResolvedMetadata(unresolvedRelation)
+
+ val resolvedRelation = maybeResolvedRelation match {
+ case Some(relationsWithResolvedMetadata) =>
+ planLogger.logPlanResolutionEvent(
+ relationsWithResolvedMetadata,
+ "Relation metadata retrieved"
+ )
+
+ relationsWithResolvedMetadata
+ case None =>
+ val multipartId = unresolvedRelation.multipartIdentifier
+ val catalogPath = (catalogManager.currentCatalog.name() +:
+ catalogManager.currentNamespace).toSeq
+ val searchPath =
SQLConf.get.resolutionSearchPath(catalogPath).map(toSQLId)
+ unresolvedRelation.tableNotFound(multipartId, searchPath)
+ }
- resolve(resolvedRelation)
+ resolve(resolvedRelation)
+ } catch {
+ case e: NoSuchTableException
+ if e.getCondition == "TABLE_OR_VIEW_NOT_FOUND" &&
+ Option(e.getMessageParameters.get("searchPath")).contains("") =>
+ // NoSuchTableException from some code paths does not set
searchPath; replace with
+ // the current resolution search path so the user sees the path that
was used.
+ val catalogPath = (catalogManager.currentCatalog.name() +:
+ catalogManager.currentNamespace).toSeq
+ val searchPathSeq = SQLConf.get.resolutionSearchPath(catalogPath)
+ val formattedPath = searchPathSeq.map(toSQLId).mkString("[", ", ",
"]")
+ val updatedParams =
+ e.getMessageParameters.asScala.toMap.updated("searchPath",
formattedPath)
+ val newEx = new AnalysisException(
+ SparkThrowableHelper.getMessage("TABLE_OR_VIEW_NOT_FOUND",
updatedParams.asJava),
Review Comment:
The catch block converts `NoSuchTableException` to `AnalysisException` when
patching the search path. This changes the exception type, which could affect
code that catches `NoSuchTableException` specifically. Consider constructing a
new `NoSuchTableException` (using the `(errorClass, messageParameters, cause)`
constructor) instead.
##########
sql/api/src/main/scala/org/apache/spark/sql/catalyst/analysis/noSuchItemsExceptions.scala:
##########
@@ -116,26 +125,33 @@ class NoSuchTableException private (
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
messageParameters = Map(
"relationName" ->
- (quoteIdentifier(db) + "." + quoteIdentifier(table))),
+ (quoteIdentifier(db) + "." + quoteIdentifier(table)),
+ "searchPath" -> NoSuchItemExceptionHelper.formatSearchPath(Seq(db))),
cause = None)
}
def this(name: Seq[String]) = {
this(
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
- messageParameters = Map("relationName" -> quoteNameParts(name)),
+ messageParameters = Map(
+ "relationName" -> quoteNameParts(name),
+ "searchPath" -> NoSuchItemExceptionHelper.formatSearchPath(
+ if (name.length > 1) name.dropRight(1).toSeq else Seq.empty)),
cause = None)
Review Comment:
When `NoSuchTableException(Seq("foo"))` is constructed with a single-part
name, `searchPath` becomes `formatSearchPath(Seq.empty)` = `""`. Since the
error template now unconditionally includes `Search path: <searchPath>.`, the
user sees the confusing line "Search path: ." The same issue affects
`CannotReplaceMissingTableException`'s backward-compatible constructor.
External connectors constructing `NoSuchTableException` with single-part names
will also produce this. Consider either suppressing the line when `searchPath`
is empty, or providing a fallback like "not available".
##########
sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala:
##########
@@ -300,6 +300,23 @@ class ResolveSessionCatalog(val catalogManager:
CatalogManager)
case DropTable(ResolvedIdentifier(FakeSystemCatalog, ident), _, _) =>
DropTempViewCommand(ident)
+ // Temp view resolved by name (e.g. session.v or system.session.v) -> drop
by table name,
+ // with db = session when session-qualified so SessionCatalog finds the
temp view.
+ case DropView(ResolvedTempView(ident, _), ifExists) =>
+ val db = if (ident.namespace().nonEmpty) {
+ val ns = ident.namespace().toSeq
Review Comment:
The `system.session` namespace-to-db conversion logic is repeated
identically in at least three places: here, `ResolvedViewIdentifier.unapply`
(line 767), and `DropTempViewCommand` in `ddl.scala` (line 276). Similarly, the
parser has `normalizeTempViewIdentifier` and `extractTempFunctionName` with
identical logic. Consider extracting shared helpers to avoid divergence.
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala:
##########
@@ -291,21 +324,36 @@ trait CheckAnalysis extends LookupCatalog with
QueryErrorsBase with PlanToString
u.schemaNotFound(u.multipartIdentifier)
case u: UnresolvedTable =>
- u.tableNotFound(u.multipartIdentifier)
+ // DDL: use TABLE_OR_VIEW_NOT_FOUND with search path filtered to
system.session + catalog.
+ val catalogPath = catalogPathForError
+ u.tableNotFound(u.multipartIdentifier,
ddlSearchPathForError(catalogPath))
case u: UnresolvedView =>
- u.tableNotFound(u.multipartIdentifier)
+ val catalogPath = catalogPathForError
+ u.tableNotFound(u.multipartIdentifier,
ddlSearchPathForError(catalogPath))
case u: UnresolvedTableOrView =>
- u.tableNotFound(u.multipartIdentifier)
+ // DESCRIBE TABLE / DESC TABLE: full resolution path (same as DESCRIBE
FUNCTION and SELECT).
+ // Explicit TEMPORARY VIEW (e.g. DROP TEMPORARY VIEW) -> only
SYSTEM.SESSION; else DDL path.
+ val catalogPath = catalogPathForError
+ val searchPath = if
(u.commandName.toUpperCase(Locale.ROOT).contains("TEMPORARY VIEW")) {
+ tempViewOnlySearchPathForError()
Review Comment:
Using `commandName.toUpperCase(Locale.ROOT).contains("TEMPORARY VIEW")` and
`contains("DESCRIBE TABLE")` to decide the search path is fragile. If the command
name strings ever change, the search path silently falls back to the default DDL
branch. Consider matching on the node type or a dedicated flag instead.
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala:
##########
@@ -291,21 +324,36 @@ trait CheckAnalysis extends LookupCatalog with
QueryErrorsBase with PlanToString
u.schemaNotFound(u.multipartIdentifier)
case u: UnresolvedTable =>
- u.tableNotFound(u.multipartIdentifier)
+ // DDL: use TABLE_OR_VIEW_NOT_FOUND with search path filtered to
system.session + catalog.
+ val catalogPath = catalogPathForError
+ u.tableNotFound(u.multipartIdentifier,
ddlSearchPathForError(catalogPath))
case u: UnresolvedView =>
- u.tableNotFound(u.multipartIdentifier)
+ val catalogPath = catalogPathForError
+ u.tableNotFound(u.multipartIdentifier,
ddlSearchPathForError(catalogPath))
case u: UnresolvedTableOrView =>
- u.tableNotFound(u.multipartIdentifier)
+ // DESCRIBE TABLE / DESC TABLE: full resolution path (same as DESCRIBE
FUNCTION and SELECT).
+ // Explicit TEMPORARY VIEW (e.g. DROP TEMPORARY VIEW) -> only
SYSTEM.SESSION; else DDL path.
+ val catalogPath = catalogPathForError
+ val searchPath = if
(u.commandName.toUpperCase(Locale.ROOT).contains("TEMPORARY VIEW")) {
+ tempViewOnlySearchPathForError()
+ } else if (u.commandName.toUpperCase(Locale.ROOT).contains("DESCRIBE
TABLE") ||
+ u.commandName.toUpperCase(Locale.ROOT).contains("DESC TABLE")) {
+ fullSearchPathForError(catalogPath)
+ } else {
+ ddlSearchPathForError(catalogPath)
+ }
+ u.tableNotFound(u.multipartIdentifier, searchPath)
case u: UnresolvedRelation =>
- u.tableNotFound(u.multipartIdentifier)
+ // Queries/DML: TABLE_OR_VIEW_NOT_FOUND with full search path.
+ val catalogPath = catalogPathForError
+ u.tableNotFound(u.multipartIdentifier,
fullSearchPathForError(catalogPath))
Review Comment:
When a session-qualified name like `session.nonexistent` is not found,
`RelationResolution.resolveRelation` returns `None` (only temp views were
searched). The `UnresolvedRelation` then reaches here, which uses
`fullSearchPathForError`, showing `[system.builtin, system.session,
spark_catalog.default]`. This is misleading because only `system.session` was
actually searched. Session-qualified identifiers should get
`tempViewOnlySearchPathForError()` here, similar to how `TEMPORARY VIEW`
commands do.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]