This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 69803fb0244 [SPARK-43791][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1336
69803fb0244 is described below
commit 69803fb0244c9fc110653092bcfab7c221448bce
Author: panbingkun <[email protected]>
AuthorDate: Fri May 26 09:29:21 2023 +0300
[SPARK-43791][SQL] Assign a name to the error class _LEGACY_ERROR_TEMP_1336
### What changes were proposed in this pull request?
This PR aims to assign a name, UNSUPPORTED_FEATURE.TIME_TRAVEL, to the error class _LEGACY_ERROR_TEMP_1336.
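For illustration, a minimal sketch of the user-visible effect, assuming a running SparkSession `spark`; the view name `v` is hypothetical and not part of this patch:

```scala
import org.apache.spark.sql.AnalysisException

// Hypothetical view used only to trigger the error; any view would do.
spark.sql("CREATE OR REPLACE VIEW v AS SELECT 1 AS col")
try {
  // Time travel over a view is unsupported; it now fails with the named class.
  spark.sql("SELECT * FROM v VERSION AS OF 1").collect()
} catch {
  case e: AnalysisException =>
    // Previously "_LEGACY_ERROR_TEMP_1336"; after this change:
    assert(e.getErrorClass == "UNSUPPORTED_FEATURE.TIME_TRAVEL")
}
```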
### Why are the changes needed?
Replacing the temporary error class with a properly named one improves the error framework.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
Updated existing UTs.
Passed GA.
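For reference, the updated assertions follow the checkError pattern used in the test suites touched below (checkError and intercept come from Spark's QueryTest/SparkFunSuite helpers); a sketch with a hypothetical view name `v`:

```scala
checkError(
  exception = intercept[AnalysisException] {
    sql("SELECT * FROM v VERSION AS OF 1")
  },
  errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
  sqlState = None,
  // relationId carries the quoted identifier of the relation being time-traveled.
  parameters = Map("relationId" -> "`v`"))
```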
Closes #41309 from panbingkun/LEGACY_ERROR_TEMP_1336.
Authored-by: panbingkun <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
core/src/main/resources/error/error-classes.json | 10 ++++----
.../spark/sql/catalyst/analysis/Analyzer.scala | 3 +--
.../sql/catalyst/analysis/CTESubstitution.scala | 3 ++-
.../spark/sql/errors/QueryCompilationErrors.scala | 6 ++---
.../spark/sql/execution/datasources/rules.scala | 3 ++-
.../datasources/v2/V2SessionCatalog.scala | 4 +++-
.../spark/sql/connector/DataSourceV2SQLSuite.scala | 8 +++----
.../spark/sql/execution/SQLViewTestSuite.scala | 27 ++++++++++++++++------
8 files changed, 40 insertions(+), 24 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 7683e7b8650..0246d4f378e 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -2139,6 +2139,11 @@
"Table <tableName> does not support <operation>. Please check the
current catalog and namespace to make sure the qualified table name is
expected, and also check the catalog implementation which is configured by
\"spark.sql.catalog\"."
]
},
+ "TIME_TRAVEL" : {
+ "message" : [
+ "Time travel on the relation: <relationId>."
+ ]
+ },
"TOO_MANY_TYPE_ARGUMENTS_FOR_UDF_CLASS" : {
"message" : [
"UDF class with <num> type arguments."
@@ -3916,11 +3921,6 @@
"<expr> is not a valid timestamp expression for time travel."
]
},
- "_LEGACY_ERROR_TEMP_1336" : {
- "message" : [
- "Cannot time travel <target>."
- ]
- },
"_LEGACY_ERROR_TEMP_1337" : {
"message" : [
"Table <tableName> does not support time travel."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 604fc3f84c8..dc7134a9605 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -1169,8 +1169,7 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
throw QueryCompilationErrors.readNonStreamingTempViewError(identifier.quoted)
}
if (isTimeTravel) {
- val target = if (tempViewPlan.isStreaming) "streams" else "views"
- throw QueryCompilationErrors.timeTravelUnsupportedError(target)
+ throw QueryCompilationErrors.timeTravelUnsupportedError(toSQLId(identifier))
}
tempViewPlan
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CTESubstitution.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CTESubstitution.scala
index 77c687843c3..4e3234f9c0d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CTESubstitution.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CTESubstitution.scala
@@ -23,6 +23,7 @@ import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
import org.apache.spark.sql.catalyst.plans.logical.{Command, CTERelationDef, CTERelationRef, InsertIntoDir, LogicalPlan, ParsedStatement, SubqueryAlias, UnresolvedWith, WithCTE}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.trees.TreePattern._
+import org.apache.spark.sql.catalyst.util.TypeUtils._
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.internal.SQLConf.{LEGACY_CTE_PRECEDENCE_POLICY, LegacyBehaviorPolicy}
@@ -253,7 +254,7 @@ object CTESubstitution extends Rule[LogicalPlan] {
_.containsAnyPattern(RELATION_TIME_TRAVEL, UNRESOLVED_RELATION, PLAN_EXPRESSION)) {
case RelationTimeTravel(UnresolvedRelation(Seq(table), _, _), _, _)
if cteRelations.exists(r => plan.conf.resolver(r._1, table)) =>
- throw QueryCompilationErrors.timeTravelUnsupportedError("subqueries
from WITH clause")
+ throw QueryCompilationErrors.timeTravelUnsupportedError(toSQLId(table))
case u @ UnresolvedRelation(Seq(table), _, _) =>
cteRelations.find(r => plan.conf.resolver(r._1, table)).map { case (_, d) =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 648c82836ac..879bf620188 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3136,10 +3136,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
messageParameters = Map("expr" -> expr.sql))
}
- def timeTravelUnsupportedError(target: String): Throwable = {
+ def timeTravelUnsupportedError(relationId: String): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1336",
- messageParameters = Map("target" -> target))
+ errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
+ messageParameters = Map("relationId" -> relationId))
}
def tableNotSupportTimeTravelError(tableName: Identifier): Throwable = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index 2564d7e50a2..dc9d0999d1a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -25,6 +25,7 @@ import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions.{Expression, InputFileBlockLength, InputFileBlockStart, InputFileName, RowOrdering}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.util.TypeUtils._
import org.apache.spark.sql.connector.expressions.{FieldReference, RewritableTransform}
import org.apache.spark.sql.errors.QueryCompilationErrors
import org.apache.spark.sql.execution.command.DDLUtils
@@ -64,7 +65,7 @@ class ResolveSQLOnFile(sparkSession: SparkSession) extends Rule[LogicalPlan] {
// so we should leave it be for now.
try {
resolveDataSource(u.multipartIdentifier)
- throw QueryCompilationErrors.timeTravelUnsupportedError("path-based
tables")
+ throw QueryCompilationErrors.timeTravelUnsupportedError(toSQLId(u.multipartIdentifier))
} catch {
case _: ClassNotFoundException => r
}
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
index b4789c98df9..437194b7b5b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2SessionCatalog.scala
@@ -26,6 +26,7 @@ import scala.collection.mutable
import org.apache.spark.sql.catalyst.{FunctionIdentifier, SQLConfHelper, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchTableException, TableAlreadyExistsException}
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable, CatalogTableType, CatalogUtils, SessionCatalog}
+import org.apache.spark.sql.catalyst.util.TypeUtils._
import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogV2Util, Column, FunctionCatalog, Identifier, NamespaceChange, SupportsNamespaces, Table, TableCatalog, TableCatalogCapability, TableChange, V1Table}
import org.apache.spark.sql.connector.catalog.NamespaceChange.RemoveProperty
import org.apache.spark.sql.connector.catalog.functions.UnboundFunction
@@ -85,7 +86,8 @@ class V2SessionCatalog(catalog: SessionCatalog)
t match {
case V1Table(catalogTable) =>
if (catalogTable.tableType == CatalogTableType.VIEW) {
- throw QueryCompilationErrors.timeTravelUnsupportedError("views")
+ throw QueryCompilationErrors.timeTravelUnsupportedError(
+ toSQLId(catalogTable.identifier.nameParts))
} else {
throw QueryCompilationErrors.tableNotSupportTimeTravelError(ident)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 968e91e31bd..1aa216cf7ce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2977,17 +2977,17 @@ class DataSourceV2SQLSuiteV1Filter
exception = intercept[AnalysisException] {
sql("SELECT * FROM parquet.`/the/path` VERSION AS OF 1")
},
- errorClass = "_LEGACY_ERROR_TEMP_1336",
+ errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
sqlState = None,
- parameters = Map("target" -> "path-based tables"))
+ parameters = Map("relationId" -> "`parquet`.`/the/path`"))
checkError(
exception = intercept[AnalysisException] {
sql("WITH x AS (SELECT 1) SELECT * FROM x VERSION AS OF 1")
},
- errorClass = "_LEGACY_ERROR_TEMP_1336",
+ errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
sqlState = None,
- parameters = Map("target" -> "subqueries from WITH clause"))
+ parameters = Map("relationId" -> "`x`"))
val subquery1 = "SELECT 1 FROM non_exist"
checkError(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
index 4e89d6fdf5e..073e6c6413b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewTestSuite.scala
@@ -17,6 +17,8 @@
package org.apache.spark.sql.execution
+import java.util.Locale
+
import scala.collection.JavaConverters._
import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
@@ -25,6 +27,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogFunction
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.Repartition
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.withDefaultTimeZone
+import org.apache.spark.sql.catalyst.util.TypeUtils._
import org.apache.spark.sql.connector.catalog._
import org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf._
@@ -398,15 +401,21 @@ abstract class SQLViewTestSuite extends QueryTest with SQLTestUtils {
test("SPARK-37219: time travel is unsupported") {
val viewName = createView("testView", "SELECT 1 col")
withView(viewName) {
- val e1 = intercept[AnalysisException](
- sql(s"SELECT * FROM $viewName VERSION AS OF 1").collect()
+ checkError(
+ exception = intercept[AnalysisException](
+ sql(s"SELECT * FROM $viewName VERSION AS OF 1").collect()
+ ),
+ errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
+ parameters = Map("relationId" ->
toSQLId(fullyQualifiedViewName("testView")))
)
- assert(e1.message.contains("Cannot time travel views"))
- val e2 = intercept[AnalysisException](
- sql(s"SELECT * FROM $viewName TIMESTAMP AS OF '2000-10-10'").collect()
+ checkError(
+ exception = intercept[AnalysisException](
+ sql(s"SELECT * FROM $viewName TIMESTAMP AS OF
'2000-10-10'").collect()
+ ),
+ errorClass = "UNSUPPORTED_FEATURE.TIME_TRAVEL",
+ parameters = Map("relationId" -> toSQLId(fullyQualifiedViewName("testView")))
)
- assert(e2.message.contains("Cannot time travel views"))
}
}
@@ -524,7 +533,11 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession {
override protected def viewTypeString: String = "VIEW"
override protected def formattedViewName(viewName: String): String = s"$db.$viewName"
override protected def fullyQualifiedViewName(viewName: String): String =
- s"spark_catalog.$db.$viewName"
+ conf.caseSensitiveAnalysis match {
+ case true => s"spark_catalog.$db.$viewName"
+ case false => s"spark_catalog.$db.${viewName.toLowerCase(Locale.ROOT)}"
+ }
+
override protected def tableIdentifier(viewName: String): TableIdentifier = {
TableIdentifier(viewName, Some(db), Some(SESSION_CATALOG_NAME))
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]