This is an automated email from the ASF dual-hosted git repository.
wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 6806c8bffa0b [SPARK-55024][SQL][FOLLOWUP] Delay namespace length check to v1 identifier creation
6806c8bffa0b is described below
commit 6806c8bffa0b95a7d0f59fe890e8a8762afb9abb
Author: Wenchen Fan <[email protected]>
AuthorDate: Fri Feb 20 00:28:05 2026 +0800
[SPARK-55024][SQL][FOLLOWUP] Delay namespace length check to v1 identifier creation
### What changes were proposed in this pull request?
This is a follow-up to #53788, which moved the namespace length check from
individual command handlers to the `CatalogAndIdentifier` extractor. That
approach is too aggressive: users can extend the session catalog via
`CatalogExtension` and support multi-part namespaces through v2 APIs. The check
should only happen when we actually create v1 identifiers like
`TableIdentifier`, not at the shared name-resolution layer.
### Changes
- **Remove the namespace check from `CatalogAndIdentifier.unapply`** so it
remains a pure name-resolution mechanism, preserving `CatalogExtension`
flexibility.
- **Tighten `CatalogV2Implicits.IdentifierHelper.asTableIdentifier` and
`asFunctionIdentifier`** to require exactly one namespace part and throw
`REQUIRES_SINGLE_PART_NAMESPACE` (instead of the less precise
`IDENTIFIER_TOO_MANY_NAME_PARTS`). This centralizes the validation at the
single point where v1 identifiers are created (see the sketch after this list).
- **Remove the now-redundant `V2SessionCatalog.TableIdentifierHelper`** and
use the unified `CatalogV2Implicits` conversion everywhere.
- **Simplify `ResolveSessionCatalog` extractors** (`ResolvedV1Identifier`,
`ResolvedIdentifierInSessionCatalog`, `ResolvedViewIdentifier`) to delegate to
`ident.asTableIdentifier`.
- **Fix `SparkSqlParser` temp view creation** to check the name length up
front instead of calling `asTableIdentifier` first, so the user always sees
`notAllowedToAddDBPrefixForTempViewError` rather than a generic namespace error.
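For reference, a condensed sketch of the tightened conversion (it mirrors the
`CatalogV2Implicits` hunk in the diff below; the enclosing implicit class and
imports are elided):

```scala
// Sketch of the new IdentifierHelper.asTableIdentifier: exactly one
// namespace part is accepted. The old empty-namespace fallback is gone,
// and multi-part namespaces now throw REQUIRES_SINGLE_PART_NAMESPACE
// instead of IDENTIFIER_TOO_MANY_NAME_PARTS.
def asTableIdentifier: TableIdentifier = ident.namespace match {
  case Array(dbName) => TableIdentifier(ident.name, Some(dbName))
  case _ =>
    throw QueryCompilationErrors.requiresSinglePartNamespaceError(asMultipartIdentifier)
}
```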
### Why are the changes needed?
`CatalogExtension` allows users to extend the built-in session catalog and
potentially support multi-part namespaces for v2 operations. The early check in
`CatalogAndIdentifier` would block such extensions. The namespace length should
only be validated when we actually need to create v1 identifiers (e.g.
`TableIdentifier`), which inherently require a single-part namespace.
Additionally, this PR unifies the scattered namespace validation into a
single point (`CatalogV2Implicits.IdentifierHelper`), reducing code duplication
and ensuring consistent `REQUIRES_SINGLE_PART_NAMESPACE` errors.
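As a hypothetical illustration (the class below is not part of this PR), this
is the kind of extension the early check would have blocked:

```scala
import org.apache.spark.sql.connector.catalog.{DelegatingCatalogExtension, Identifier, Table}

// Hypothetical session-catalog extension that serves multi-part
// namespaces itself via the v2 API and delegates single-part names to
// the built-in catalog. With the early check in CatalogAndIdentifier,
// loadTable was never reached for `ns1`.`ns2`.`tbl`; after this PR it is.
class MultiNamespaceSessionCatalog extends DelegatingCatalogExtension {
  override def loadTable(ident: Identifier): Table = {
    if (ident.namespace().length > 1) {
      // A real extension would resolve this against its own metastore;
      // stubbed here for illustration.
      throw new UnsupportedOperationException("example stub")
    } else {
      super.loadTable(ident)
    }
  }
}
```

Such an extension is installed by pointing `spark.sql.catalog.spark_catalog`
at the class name.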
### Does this PR introduce _any_ user-facing change?
Yes. Multi-part namespace identifiers now flow through
`CatalogAndIdentifier` without error, allowing `CatalogExtension`
implementations to handle them. The error is only thrown when the session
catalog actually needs to convert to a v1 identifier.
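For a plain session catalog with no extension installed, the observable
difference is only where the error is raised; a hedged example:

```scala
// Assuming no CatalogExtension is installed: name resolution itself no
// longer rejects the multi-part name, so the error below comes from the
// v1 TableIdentifier conversion inside the session catalog.
spark.sql("SELECT * FROM ns1.ns2.tbl")
// [REQUIRES_SINGLE_PART_NAMESPACE] spark_catalog requires a single-part
// namespace, but got identifier `ns1`.`ns2`.`tbl`.
```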
### How was this patch tested?
Existing tests updated:
- `LookupCatalogSuite` — removed the early-rejection test, added multi-part
namespace cases back.
- `V2SessionCatalogSuite` — updated to expect
`REQUIRES_SINGLE_PART_NAMESPACE`.
- `DataSourceV2SQLSuiteV1Filter` and `DataSourceV2FunctionSuite` — all 313
tests pass.
### Was this patch authored or co-authored using generative AI tooling?
Yes.
Made with [Cursor](https://cursor.com)
Closes #54247 from cloud-fan/follow.
Authored-by: Wenchen Fan <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 2 +-
.../sql/connector/catalog/CatalogV2Implicits.scala | 8 +++---
.../sql/connector/catalog/LookupCatalog.scala | 17 ++++--------
.../spark/sql/errors/QueryCompilationErrors.scala | 4 +--
.../sql/connector/catalog/LookupCatalogSuite.scala | 19 ++-----------
.../catalyst/analysis/ResolveSessionCatalog.scala | 21 ++++----------
.../spark/sql/execution/SparkSqlParser.scala | 6 ++--
.../datasources/v2/V2SessionCatalog.scala | 23 ++--------------
.../datasources/v2/jdbc/JDBCTableCatalog.scala | 3 +-
.../identifier-clause-legacy.sql.out | 12 ++++----
.../analyzer-results/identifier-clause.sql.out | 12 ++++----
.../results/identifier-clause-legacy.sql.out | 12 ++++----
.../sql-tests/results/identifier-clause.sql.out | 12 ++++----
.../sql/connector/DataSourceV2FunctionSuite.scala | 6 ++--
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 6 ++--
.../sql/errors/QueryCompilationErrorsSuite.scala | 16 +++++++----
.../execution/command/v1/ShowColumnsSuite.scala | 2 +-
.../datasources/v2/V2SessionCatalogSuite.scala | 6 ++--
.../org/apache/spark/sql/jdbc/JDBCV2Suite.scala | 32 +++++++++-------------
.../execution/command/CreateNamespaceSuite.scala | 2 +-
20 files changed, 87 insertions(+), 134 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 9d228de9b590..24a12248ce72 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -5366,7 +5366,7 @@
},
"REQUIRES_SINGLE_PART_NAMESPACE" : {
"message" : [
- "<sessionCatalog> requires a single-part namespace, but got <namespace>."
+ "<sessionCatalog> requires a single-part namespace, but got identifier
<identifier>."
],
"sqlState" : "42K05"
},
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
index 726527394deb..cf6052009c92 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -166,9 +166,9 @@ private[sql] object CatalogV2Implicits {
def asMultipartIdentifier: Seq[String] = (ident.namespace :+ ident.name).toImmutableArraySeq
def asTableIdentifier: TableIdentifier = ident.namespace match {
- case ns if ns.isEmpty => TableIdentifier(ident.name)
case Array(dbName) => TableIdentifier(ident.name, Some(dbName))
- case _ => throw QueryCompilationErrors.identifierTooManyNamePartsError(original)
+ case _ =>
+ throw QueryCompilationErrors.requiresSinglePartNamespaceError(asMultipartIdentifier)
}
/**
@@ -192,9 +192,9 @@ private[sql] object CatalogV2Implicits {
}
def asFunctionIdentifier: FunctionIdentifier = ident.namespace() match {
- case ns if ns.isEmpty => FunctionIdentifier(ident.name())
case Array(dbName) => FunctionIdentifier(ident.name(), Some(dbName))
- case _ => throw QueryCompilationErrors.identifierTooManyNamePartsError(original)
+ case _ =>
+ throw QueryCompilationErrors.requiresSinglePartNamespaceError(asMultipartIdentifier)
}
def toQualifiedNameParts(catalog: CatalogPlugin): Seq[String] = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala
index 386be0da840a..21f4258bce6d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/LookupCatalog.scala
@@ -19,9 +19,7 @@ package org.apache.spark.sql.connector.catalog
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.{SQLConf, StaticSQLConf}
-import org.apache.spark.util.ArrayImplicits._
/**
* A trait to encapsulate catalog lookup function and helpful extractors.
@@ -109,8 +107,8 @@ private[sql] trait LookupCatalog extends Logging {
def unapply(nameParts: Seq[String]): Option[(CatalogPlugin, Identifier)] = {
assert(nameParts.nonEmpty)
- val (catalog, ident) = if (nameParts.length == 1) {
- (currentCatalog, Identifier.of(catalogManager.currentNamespace, nameParts.head))
+ if (nameParts.length == 1) {
+ Some((currentCatalog, Identifier.of(catalogManager.currentNamespace, nameParts.head)))
} else if (nameParts.head.equalsIgnoreCase(globalTempDB)) {
// Conceptually global temp views are in a special reserved catalog. However, the v2 catalog
// API does not support view yet, and we have to use v1 commands to deal with global temp
@@ -118,20 +116,15 @@ private[sql] trait LookupCatalog extends Logging {
// in the session catalog. The special namespace has higher priority during name resolution.
// For example, if the name of a custom catalog is the same with `GLOBAL_TEMP_DATABASE`,
// this custom catalog can't be accessed.
- (catalogManager.v2SessionCatalog, nameParts.asIdentifier)
+ Some((catalogManager.v2SessionCatalog, nameParts.asIdentifier))
} else {
try {
- (catalogManager.catalog(nameParts.head), nameParts.tail.asIdentifier)
+ Some((catalogManager.catalog(nameParts.head), nameParts.tail.asIdentifier))
} catch {
case _: CatalogNotFoundException =>
- (currentCatalog, nameParts.asIdentifier)
+ Some((currentCatalog, nameParts.asIdentifier))
}
}
- if (CatalogV2Util.isSessionCatalog(catalog) && ident.namespace().length != 1) {
- throw QueryCompilationErrors.requiresSinglePartNamespaceError(
- ident.namespace().toImmutableArraySeq)
- }
- Some((catalog, ident))
}
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index b4b01d680367..a6174b82077b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1542,12 +1542,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
new TableAlreadyExistsException(ident.asMultipartIdentifier)
}
- def requiresSinglePartNamespaceError(namespace: Seq[String]): Throwable = {
+ def requiresSinglePartNamespaceError(identifier: Seq[String]): Throwable = {
new AnalysisException(
errorClass = "REQUIRES_SINGLE_PART_NAMESPACE",
messageParameters = Map(
"sessionCatalog" -> CatalogManager.SESSION_CATALOG_NAME,
- "namespace" -> toSQLId(namespace)))
+ "identifier" -> toSQLId(identifier)))
}
def namespaceAlreadyExistsError(namespace: Array[String]): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/LookupCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/LookupCatalogSuite.scala
index f19a512265a6..ac0eeb74349a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/LookupCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/LookupCatalogSuite.scala
@@ -57,15 +57,15 @@ class LookupCatalogSuite extends SparkFunSuite with LookupCatalog with Inside {
test("catalog and identifier") {
Seq(
- // Session catalog with single-part namespace
("tbl", sessionCatalog, Seq("default"), "tbl"),
("db.tbl", sessionCatalog, Seq("db"), "tbl"),
(s"$globalTempDB.tbl", sessionCatalog, Seq(globalTempDB), "tbl"),
+ (s"$globalTempDB.ns1.ns2.tbl", sessionCatalog, Seq(globalTempDB, "ns1",
"ns2"), "tbl"),
+ ("ns1.ns2.tbl", sessionCatalog, Seq("ns1", "ns2"), "tbl"),
("`db.tbl`", sessionCatalog, Seq("default"), "db.tbl"),
("parquet.`file:/tmp/db.tbl`", sessionCatalog, Seq("parquet"),
"file:/tmp/db.tbl"),
("`org.apache.spark.sql.json`.`s3://buck/tmp/abc.json`", sessionCatalog,
Seq("org.apache.spark.sql.json"), "s3://buck/tmp/abc.json"),
- // Non-session catalogs (no namespace restriction)
("prod.func", catalogs("prod"), Seq.empty, "func"),
("prod.db.tbl", catalogs("prod"), Seq("db"), "tbl"),
("test.db.tbl", catalogs("test"), Seq("db"), "tbl"),
@@ -79,21 +79,6 @@ class LookupCatalogSuite extends SparkFunSuite with LookupCatalog with Inside {
}
}
- test("session catalog requires single-part namespace") {
- // Multi-part namespaces are not allowed for session catalog
- Seq(
- "ns1.ns2.tbl", // two-part namespace
- s"$globalTempDB.ns1.ns2.tbl" // three-part namespace
- ).foreach { sql =>
- val e = intercept[org.apache.spark.sql.AnalysisException] {
- parseMultipartIdentifier(sql) match {
- case CatalogAndIdentifier(_, _) =>
- }
- }
- assert(e.getCondition === "REQUIRES_SINGLE_PART_NAMESPACE")
- }
- }
-
test("table identifier") {
Seq(
("tbl", "tbl", None),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 92ffbd08f814..39162ed69f24 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -37,7 +37,6 @@ import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Utils
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
import org.apache.spark.sql.internal.connector.V1Function
import org.apache.spark.sql.types.{DataType, MetadataBuilder, StringType, StructField, StructType}
-import org.apache.spark.util.ArrayImplicits._
import org.apache.spark.util.SparkStringUtils
/**
@@ -727,10 +726,10 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
def unapply(resolved: LogicalPlan): Option[TableIdentifier] = resolved match {
case ResolvedPersistentView(catalog, ident, _) =>
assert(isSessionCatalog(catalog))
- assert(ident.namespace().length == 1)
- Some(TableIdentifier(ident.name, Some(ident.namespace.head), Some(catalog.name)))
+ Some(ident.asTableIdentifier.copy(catalog = Some(catalog.name)))
- case ResolvedTempView(ident, _) => Some(ident.asTableIdentifier)
+ case ResolvedTempView(ident, _) =>
+ Some(TableIdentifier(ident.name(), ident.namespace().headOption))
case _ => None
}
@@ -763,24 +762,16 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
object ResolvedV1Identifier {
def unapply(resolved: LogicalPlan): Option[TableIdentifier] = resolved match {
case ResolvedIdentifier(catalog, ident) if supportsV1Command(catalog) =>
- if (ident.namespace().length != 1) {
- throw QueryCompilationErrors
- .requiresSinglePartNamespaceError(ident.namespace().toImmutableArraySeq)
- }
- Some(TableIdentifier(ident.name, Some(ident.namespace.head), Some(catalog.name)))
+ Some(ident.asTableIdentifier.copy(catalog = Some(catalog.name)))
case _ => None
}
}
// Use this object to help match commands that do not have a v2 implementation.
- object ResolvedIdentifierInSessionCatalog{
+ object ResolvedIdentifierInSessionCatalog {
def unapply(resolved: LogicalPlan): Option[TableIdentifier] = resolved match {
case ResolvedIdentifier(catalog, ident) if isSessionCatalog(catalog) =>
- if (ident.namespace().length != 1) {
- throw QueryCompilationErrors
- .requiresSinglePartNamespaceError(ident.namespace().toImmutableArraySeq)
- }
- Some(TableIdentifier(ident.name, Some(ident.namespace.head), Some(catalog.name)))
+ Some(ident.asTableIdentifier.copy(catalog = Some(catalog.name)))
case _ => None
}
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 26b408f069d4..75403e14c608 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -698,12 +698,12 @@ class SparkSqlAstBuilder extends AstBuilder {
}
withIdentClause(ctx.identifierReference(), Seq(qPlan), (ident, otherPlans) => {
- val tableIdentifier = ident.asTableIdentifier
- if (tableIdentifier.database.isDefined) {
+ if (ident.length > 1) {
// Temporary view names should NOT contain database prefix like "database.table"
throw QueryParsingErrors
- .notAllowedToAddDBPrefixForTempViewError(tableIdentifier.nameParts, ctx)
+ .notAllowedToAddDBPrefixForTempViewError(ident, ctx)
}
+ val tableIdentifier = TableIdentifier(ident.head)
CreateViewCommand(
tableIdentifier,
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index 19303ca3d2e8..07d5477754f2 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -24,7 +24,7 @@ import scala.collection.mutable
import scala.jdk.CollectionConverters._
import org.apache.spark.SparkUnsupportedOperationException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, QualifiedTableName, SQLConfHelper, TableIdentifier}
+import org.apache.spark.sql.catalyst.{QualifiedTableName, SQLConfHelper}
import org.apache.spark.sql.catalyst.analysis.{NoSuchNamespaceException, NoSuchTableException, TableAlreadyExistsException}
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType, CatalogUtils, ClusterBySpec, SessionCatalog}
import org.apache.spark.sql.catalyst.util.TypeUtils._
@@ -45,6 +45,7 @@ import org.apache.spark.util.Utils
*/
class V2SessionCatalog(catalog: SessionCatalog)
extends TableCatalog with FunctionCatalog with SupportsNamespaces with SQLConfHelper {
+ import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import V2SessionCatalog._
override val defaultNamespace: Array[String] = Array(conf.defaultDatabase)
@@ -367,26 +368,6 @@ class V2SessionCatalog(catalog: SessionCatalog)
catalog.renameTable(oldIdent.asTableIdentifier, newIdent.asTableIdentifier)
}
- implicit class TableIdentifierHelper(ident: Identifier) {
- def asTableIdentifier: TableIdentifier = {
- ident.namespace match {
- case Array(db) =>
- TableIdentifier(ident.name, Some(db))
- case other =>
- throw QueryCompilationErrors.requiresSinglePartNamespaceError(other.toImmutableArraySeq)
- }
- }
-
- def asFunctionIdentifier: FunctionIdentifier = {
- ident.namespace match {
- case Array(db) =>
- FunctionIdentifier(ident.name, Some(db))
- case other =>
- throw QueryCompilationErrors.requiresSinglePartNamespaceError(other.toImmutableArraySeq)
- }
- }
- }
-
override def namespaceExists(namespace: Array[String]): Boolean = namespace match {
case Array(db) =>
catalog.databaseExists(db)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
index 611d166a231a..ff6d28744364 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/jdbc/JDBCTableCatalog.scala
@@ -40,7 +40,6 @@ class JDBCTableCatalog extends TableCatalog
with FunctionCatalog
with DataTypeErrorsBase
with Logging {
- import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
private var catalogName: String = null
private var options: JDBCOptions = _
@@ -434,7 +433,7 @@ class JDBCTableCatalog extends TableCatalog
override def loadFunction(ident: Identifier): UnboundFunction = {
if (ident.namespace().nonEmpty) {
- throw QueryCompilationErrors.noSuchFunctionError(ident.asFunctionIdentifier)
+ throw new NoSuchFunctionException(ident)
}
functions.get(ident.name()) match {
case Some(func) =>
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
index f0a7722886ed..8c2604aa34d2 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
@@ -883,7 +883,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -897,7 +897,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -911,7 +911,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -925,7 +925,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -939,7 +939,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -975,7 +975,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
},
"queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
index 00740529b8a8..73467322d081 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
@@ -883,7 +883,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -897,7 +897,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -911,7 +911,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -925,7 +925,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -939,7 +939,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -975,7 +975,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
},
"queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/identifier-clause-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/identifier-clause-legacy.sql.out
index ff28515d2aa0..cdc6f0560276 100644
--- a/sql/core/src/test/resources/sql-tests/results/identifier-clause-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/identifier-clause-legacy.sql.out
@@ -1011,7 +1011,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1027,7 +1027,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1043,7 +1043,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1059,7 +1059,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1075,7 +1075,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1115,7 +1115,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
},
"queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/identifier-clause.sql.out b/sql/core/src/test/resources/sql-tests/results/identifier-clause.sql.out
index 8465500e5140..a4371c8be49f 100644
--- a/sql/core/src/test/resources/sql-tests/results/identifier-clause.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/identifier-clause.sql.out
@@ -1011,7 +1011,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1027,7 +1027,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1043,7 +1043,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1059,7 +1059,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`",
+ "identifier" : "`a`.`b`.`c`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1075,7 +1075,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
}
}
@@ -1115,7 +1115,7 @@ org.apache.spark.sql.AnalysisException
"errorClass" : "REQUIRES_SINGLE_PART_NAMESPACE",
"sqlState" : "42K05",
"messageParameters" : {
- "namespace" : "`a`.`b`.`c`",
+ "identifier" : "`a`.`b`.`c`.`d`",
"sessionCatalog" : "spark_catalog"
},
"queryContext" : [ {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
index c3af92a82ca8..48bad3326517 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
@@ -185,7 +185,7 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> "spark_catalog",
- "namespace" -> "`default`.`ns1`.`ns2`")
+ "identifier" -> "`default`.`ns1`.`ns2`.`fun`")
)
}
@@ -204,7 +204,7 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> "spark_catalog",
- "namespace" -> "`default`.`ns1`.`ns2`")
+ "identifier" -> "`default`.`ns1`.`ns2`.`fun`")
)
}
@@ -240,7 +240,7 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> "spark_catalog",
- "namespace" -> "`default`.`ns1`.`ns2`")
+ "identifier" -> "`default`.`ns1`.`ns2`.`fun`")
)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 0044bd1c2b44..50b3bebaeb77 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2582,7 +2582,7 @@ class DataSourceV2SQLSuiteV1Filter
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> "spark_catalog",
- "namespace" -> "`global_temp`.`ns1`.`ns2`"))
+ "identifier" -> "`global_temp`.`ns1`.`ns2`.`tbl`"))
}
test("table name same as catalog can be used") {
@@ -2616,7 +2616,9 @@ class DataSourceV2SQLSuiteV1Filter
checkError(
exception = analysisException(sql),
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
- parameters = Map("sessionCatalog" -> "spark_catalog", "namespace"
-> ""))
+ parameters = Map(
+ "sessionCatalog" -> "spark_catalog",
+ "identifier" -> "`t`"))
}
verify(s"select * from $t")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 45a656f7a6d5..e035a4c4119e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -712,15 +712,21 @@ class QueryCompilationErrorsSuite
)
}
- test("IDENTIFIER_TOO_MANY_NAME_PARTS: " +
+ test("TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS: " +
"create temp view doesn't support identifiers consisting of more than 2
parts") {
+ val sqlText =
+ "CREATE TEMPORARY VIEW db_name.schema_name.view_name AS SELECT '1' as
test_column"
checkError(
exception = intercept[ParseException] {
- sql("CREATE TEMPORARY VIEW db_name.schema_name.view_name AS SELECT '1'
as test_column")
+ sql(sqlText)
},
- condition = "IDENTIFIER_TOO_MANY_NAME_PARTS",
- sqlState = "42601",
- parameters = Map("identifier" -> "`db_name`.`schema_name`.`view_name`",
"limit" -> "2")
+ condition = "TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS",
+ sqlState = "428EK",
+ parameters = Map("actualName" -> "`db_name`.`schema_name`.`view_name`"),
+ context = ExpectedContext(
+ fragment = sqlText,
+ start = 0,
+ stop = sqlText.length - 1)
)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
index 3e8ac98dbf76..a541f80cba91 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowColumnsSuite.scala
@@ -41,7 +41,7 @@ trait ShowColumnsSuiteBase extends command.ShowColumnsSuiteBase {
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> catalog,
- "namespace" -> "`a`.`b`.`c`"
+ "identifier" -> "`a`.`b`.`c`.`tbl`"
)
)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
index 3f25a1e139fa..6f22ddf09757 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalogSuite.scala
@@ -1162,8 +1162,10 @@ class V2SessionCatalogNamespaceSuite extends V2SessionCatalogBaseSuite {
val testIdent: IdentifierHelper = Identifier.of(Array("a", "b"), "c")
checkError(
exception = intercept[AnalysisException](testIdent.asTableIdentifier),
- condition = "IDENTIFIER_TOO_MANY_NAME_PARTS",
- parameters = Map("identifier" -> "`a`.`b`.`c`", "limit" -> "2")
+ condition = "REQUIRES_SINGLE_PART_NAMESPACE",
+ parameters = Map(
+ "sessionCatalog" -> "spark_catalog",
+ "identifier" -> "`a`.`b`.`c`")
)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
index fd88559d4f98..d65a5672788d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCV2Suite.scala
@@ -2966,6 +2966,19 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
fragment = "h2.my_avg2(id)",
start = 7,
stop = 20))
+ // Multi-part namespace should also result in UNRESOLVED_ROUTINE
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql("SELECT * FROM h2.test.people where
h2.db_name.schema_name.function_name()")
+ },
+ condition = "UNRESOLVED_ROUTINE",
+ parameters = Map(
+ "routineName" -> "`h2`.`db_name`.`schema_name`.`function_name`",
+ "searchPath" -> "[`system`.`builtin`, `system`.`session`,
`h2`.`default`]"),
+ context = ExpectedContext(
+ fragment = "h2.db_name.schema_name.function_name()",
+ start = 35,
+ stop = 72))
} finally {
JdbcDialects.unregisterDialect(testH2Dialect)
JdbcDialects.registerDialect(h2Dialect)
@@ -3067,25 +3080,6 @@ class JDBCV2Suite extends QueryTest with SharedSparkSession with ExplainSuiteHel
assert(indexes3.isEmpty)
}
- test("IDENTIFIER_TOO_MANY_NAME_PARTS: " +
- "jdbc function doesn't support identifiers consisting of more than 2
parts") {
- JdbcDialects.unregisterDialect(h2Dialect)
- try {
- JdbcDialects.registerDialect(testH2Dialect)
- checkError(
- exception = intercept[AnalysisException] {
- sql("SELECT * FROM h2.test.people where
h2.db_name.schema_name.function_name()")
- },
- condition = "IDENTIFIER_TOO_MANY_NAME_PARTS",
- sqlState = "42601",
- parameters = Map("identifier" ->
"`db_name`.`schema_name`.`function_name`", "limit" -> "2")
- )
- } finally {
- JdbcDialects.unregisterDialect(testH2Dialect)
- JdbcDialects.registerDialect(h2Dialect)
- }
- }
-
test("Explain shows executed SQL query") {
val df = sql("SELECT max(id) FROM h2.test.people WHERE id > 1")
val explained = getNormalizedExplain(df, FormattedMode)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/CreateNamespaceSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/CreateNamespaceSuite.scala
index 6e3c3223520c..08bcab12d1c9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/CreateNamespaceSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/CreateNamespaceSuite.scala
@@ -37,7 +37,7 @@ class CreateNamespaceSuite extends v1.CreateNamespaceSuiteBase with CommandSuite
condition = "REQUIRES_SINGLE_PART_NAMESPACE",
parameters = Map(
"sessionCatalog" -> catalog,
- "namespace" -> "`ns1`.`ns2`"
+ "identifier" -> "`ns1`.`ns2`"
)
)
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]