This is an automated email from the ASF dual-hosted git repository.
haejoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 507de06137f1 [SPARK-50083][SQL] Integrate `_LEGACY_ERROR_TEMP_1231`
into `PARTITIONS_NOT_FOUND`
507de06137f1 is described below
commit 507de06137f1cae21628652e41033c8cc90e2705
Author: Haejoon Lee <[email protected]>
AuthorDate: Wed Jan 8 15:15:49 2025 +0900
[SPARK-50083][SQL] Integrate `_LEGACY_ERROR_TEMP_1231` into
`PARTITIONS_NOT_FOUND`
### What changes were proposed in this pull request?
This PR proposes to integrate `_LEGACY_ERROR_TEMP_1231` into
`PARTITIONS_NOT_FOUND`
### Why are the changes needed?
To improve the error message by assigning a proper error condition and
SQLSTATE
### Does this PR introduce _any_ user-facing change?
No, only the user-facing error message is improved
### How was this patch tested?
Updated the existing tests
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #48614 from itholic/LEGACY_1231.
Authored-by: Haejoon Lee <[email protected]>
Signed-off-by: Haejoon Lee <[email protected]>
---
.../src/main/resources/error/error-conditions.json | 5 --
.../spark/sql/errors/QueryCompilationErrors.scala | 8 +--
.../sql-tests/analyzer-results/show-tables.sql.out | 7 +--
.../sql-tests/results/show-tables.sql.out | 7 +--
.../command/AlterTableAddPartitionSuiteBase.scala | 19 +++++--
.../command/AlterTableDropPartitionSuiteBase.scala | 19 +++++--
.../AlterTableRenamePartitionSuiteBase.scala | 19 +++++--
.../command/ShowPartitionsSuiteBase.scala | 19 +++++--
.../execution/command/TruncateTableSuiteBase.scala | 63 +++++++++++++++++-----
.../command/v1/AlterTableSetLocationSuite.scala | 4 +-
.../sql/execution/command/v2/ShowTablesSuite.scala | 4 +-
.../apache/spark/sql/hive/StatisticsSuite.scala | 41 +++++++++-----
.../spark/sql/hive/execution/HiveDDLSuite.scala | 6 +--
.../hive/execution/command/ShowTablesSuite.scala | 4 +-
14 files changed, 159 insertions(+), 66 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 297e0ec1d367..a8ff0809be73 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -6848,11 +6848,6 @@
"Decimal scale (<scale>) cannot be greater than precision (<precision>)."
]
},
- "_LEGACY_ERROR_TEMP_1231" : {
- "message" : [
- "<key> is not a valid partition column in table <tblName>."
- ]
- },
"_LEGACY_ERROR_TEMP_1232" : {
"message" : [
"Partition spec is invalid. The spec (<specKeys>) must match the
partition spec (<partitionColumnNames>) defined in table '<tableName>'."
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index ac419fd150ae..0d5fe7bc1459 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2673,12 +2673,12 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
"comment" -> comment))
}
- def invalidPartitionColumnKeyInTableError(key: String, tblName: String):
Throwable = {
+ def invalidPartitionColumnKeyInTableError(key: String, tableName: String):
Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1231",
+ errorClass = "PARTITIONS_NOT_FOUND",
messageParameters = Map(
- "key" -> key,
- "tblName" -> toSQLId(tblName)))
+ "partitionList" -> toSQLId(key),
+ "tableName" -> toSQLId(tableName)))
}
def invalidPartitionSpecError(
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/show-tables.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/show-tables.sql.out
index a86cc72f0863..8ae12b928d72 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/show-tables.sql.out
@@ -166,10 +166,11 @@ SHOW TABLE EXTENDED LIKE 'show_t1' PARTITION(a='Us', d=1)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1231",
+ "errorClass" : "PARTITIONS_NOT_FOUND",
+ "sqlState" : "428FT",
"messageParameters" : {
- "key" : "a",
- "tblName" : "`spark_catalog`.`showdb`.`show_t1`"
+ "partitionList" : "`a`",
+ "tableName" : "`spark_catalog`.`showdb`.`show_t1`"
}
}
diff --git a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
index a4b967ca61f0..af1bb75aef88 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-tables.sql.out
@@ -247,10 +247,11 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
- "errorClass" : "_LEGACY_ERROR_TEMP_1231",
+ "errorClass" : "PARTITIONS_NOT_FOUND",
+ "sqlState" : "428FT",
"messageParameters" : {
- "key" : "a",
- "tblName" : "`spark_catalog`.`showdb`.`show_t1`"
+ "partitionList" : "`a`",
+ "tableName" : "`spark_catalog`.`showdb`.`show_t1`"
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
index cb25942822f4..13ea6f5a3053 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableAddPartitionSuiteBase.scala
@@ -23,6 +23,7 @@ import org.apache.spark.SparkNumberFormatException
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.quoteIdentifier
+import
org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
/**
@@ -97,10 +98,20 @@ trait AlterTableAddPartitionSuiteBase extends QueryTest
with DDLCommandTestUtils
withNamespaceAndTable("ns", "tbl") { t =>
spark.sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing
PARTITIONED BY (id)")
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- val errMsg = intercept[AnalysisException] {
- spark.sql(s"ALTER TABLE $t ADD PARTITION (ID=1) LOCATION 'loc1'")
- }.getMessage
- assert(errMsg.contains("ID is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`tbl`"
+ } else {
+ "`test_catalog`.`ns`.`tbl`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ spark.sql(s"ALTER TABLE $t ADD PARTITION (ID=1) LOCATION 'loc1'")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`ID`",
+ "tableName" -> expectedTableName)
+ )
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
spark.sql(s"ALTER TABLE $t ADD PARTITION (ID=1) LOCATION 'loc1'")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
index 279042f675cd..a49a94174195 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.{AnalysisException, QueryTest,
Row}
import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.quoteIdentifier
+import
org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
/**
@@ -103,10 +104,20 @@ trait AlterTableDropPartitionSuiteBase extends QueryTest
with DDLCommandTestUtil
withNamespaceAndTable("ns", "tbl") { t =>
sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED
BY (id)")
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- val errMsg = intercept[AnalysisException] {
- sql(s"ALTER TABLE $t DROP PARTITION (ID=1)")
- }.getMessage
- assert(errMsg.contains("ID is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`tbl`"
+ } else {
+ "`test_catalog`.`ns`.`tbl`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t DROP PARTITION (ID=1)")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`ID`",
+ "tableName" -> expectedTableName)
+ )
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
index 905e6cfb9caa..186f2b293ea8 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableRenamePartitionSuiteBase.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.{AnalysisException, QueryTest,
Row}
import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException,
PartitionsAlreadyExistException}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.quoteIdentifier
+import
org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
/**
@@ -170,10 +171,20 @@ trait AlterTableRenamePartitionSuiteBase extends
QueryTest with DDLCommandTestUt
checkPartitions(t, Map("id" -> "1"))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- val errMsg = intercept[AnalysisException] {
- sql(s"ALTER TABLE $t PARTITION (ID = 1) RENAME TO PARTITION (id =
2)")
- }.getMessage
- assert(errMsg.contains("ID is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`tbl`"
+ } else {
+ "`test_catalog`.`ns`.`tbl`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ALTER TABLE $t PARTITION (ID = 1) RENAME TO PARTITION (id =
2)")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`ID`",
+ "tableName" -> expectedTableName)
+ )
checkPartitions(t, Map("id" -> "1"))
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
index 462b967a7590..f7d41556b4e6 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
@@ -18,6 +18,7 @@
package org.apache.spark.sql.execution.command
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
+import
org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{StringType, StructType}
@@ -66,10 +67,20 @@ trait ShowPartitionsSuiteBase extends QueryTest with
DDLCommandTestUtils {
test("non-partitioning columns") {
withNamespaceAndTable("ns", "dateTable") { t =>
createDateTable(t)
- val errMsg = intercept[AnalysisException] {
- sql(s"SHOW PARTITIONS $t PARTITION(abcd=2015, xyz=1)")
- }.getMessage
- assert(errMsg.contains("abcd is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`datetable`"
+ } else {
+ "`test_catalog`.`ns`.`dateTable`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"SHOW PARTITIONS $t PARTITION(abcd=2015, xyz=1)")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`abcd`",
+ "tableName" -> expectedTableName)
+ )
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
index 8c985ea1f052..b61065f41c5e 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/TruncateTableSuiteBase.scala
@@ -21,6 +21,7 @@ import org.apache.spark.sql.{AnalysisException, QueryTest,
Row}
import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionException
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.quoteIdentifier
+import
org.apache.spark.sql.connector.catalog.CatalogManager.SESSION_CATALOG_NAME
import org.apache.spark.sql.internal.SQLConf
/**
@@ -103,10 +104,20 @@ trait TruncateTableSuiteBase extends QueryTest with
DDLCommandTestUtils {
}
// throw exception if the column in partition spec is not a partition
column.
- val errMsg = intercept[AnalysisException] {
- sql(s"TRUNCATE TABLE $t PARTITION (unknown = 1)")
- }.getMessage
- assert(errMsg.contains("unknown is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`parttable`"
+ } else {
+ "`test_catalog`.`ns`.`partTable`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"TRUNCATE TABLE $t PARTITION (unknown = 1)")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`unknown`",
+ "tableName" -> expectedTableName)
+ )
}
}
@@ -117,10 +128,28 @@ trait TruncateTableSuiteBase extends QueryTest with
DDLCommandTestUtils {
sql(s"CREATE TABLE $t (c0 INT) $defaultUsing")
sql(s"INSERT INTO $t SELECT 0")
- val errMsg = intercept[AnalysisException] {
- sql(s"TRUNCATE TABLE $t PARTITION (c0=1)")
- }.getMessage
- assert(errMsg.contains(invalidPartColumnError))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`tbl`"
+ } else {
+ "`test_catalog`.`ns`.`tbl`"
+ }
+ val expectedCondition = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ "_LEGACY_ERROR_TEMP_1267"
+ } else {
+ "PARTITIONS_NOT_FOUND"
+ }
+ val expectedParameters = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ Map("tableIdentWithDB" -> expectedTableName)
+ } else {
+ Map("partitionList" -> "`c0`", "tableName" -> expectedTableName)
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"TRUNCATE TABLE $t PARTITION (c0=1)")
+ },
+ condition = expectedCondition,
+ parameters = expectedParameters
+ )
}
}
@@ -145,10 +174,20 @@ trait TruncateTableSuiteBase extends QueryTest with
DDLCommandTestUtils {
sql(s"INSERT INTO $t PARTITION (id=0) SELECT 'abc'")
sql(s"INSERT INTO $t PARTITION (id=1) SELECT 'def'")
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- val errMsg = intercept[AnalysisException] {
- sql(s"TRUNCATE TABLE $t PARTITION (ID=1)")
- }.getMessage
- assert(errMsg.contains("ID is not a valid partition column"))
+ val expectedTableName = if (commandVersion ==
DDLCommandTestUtils.V1_COMMAND_VERSION) {
+ s"`$SESSION_CATALOG_NAME`.`ns`.`tbl`"
+ } else {
+ "`test_catalog`.`ns`.`tbl`"
+ }
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"TRUNCATE TABLE $t PARTITION (ID=1)")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`ID`",
+ "tableName" -> expectedTableName)
+ )
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
sql(s"TRUNCATE TABLE $t PARTITION (ID=1)")
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala
index 8f5af2e1f2e7..343a591fb558 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableSetLocationSuite.scala
@@ -93,8 +93,8 @@ trait AlterTableSetLocationSuiteBase extends
command.AlterTableSetLocationSuiteB
exception = intercept[AnalysisException] {
sql(s"ALTER TABLE $t PARTITION (A='1', B='2') SET LOCATION
'/path/to/part/ways3'")
},
- condition = "_LEGACY_ERROR_TEMP_1231",
- parameters = Map("key" -> "A", "tblName" ->
"`spark_catalog`.`ns`.`tbl`")
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map("partitionList" -> "`A`", "tableName" ->
"`spark_catalog`.`ns`.`tbl`")
)
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
index d66dca20d77b..5719fbee370a 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowTablesSuite.scala
@@ -53,8 +53,8 @@ class ShowTablesSuite extends command.ShowTablesSuiteBase
with CommandSuiteBase
catalog: String,
namespace: String,
table: String): (String, Map[String, String]) = {
- ("_LEGACY_ERROR_TEMP_1231",
- Map("key" -> "id", "tblName" -> s"`$catalog`.`$namespace`.`$table`"))
+ ("PARTITIONS_NOT_FOUND",
+ Map("partitionList" -> "`id`", "tableName" ->
s"`$catalog`.`$namespace`.`$table`"))
}
protected override def namespaceKey: String = "Namespace"
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
index 9c2f4461ff26..e2f0040afe57 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
@@ -609,12 +609,15 @@ class StatisticsSuite extends
StatisticsCollectionTestBase with TestHiveSingleto
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
- val message = intercept[AnalysisException] {
- sql(s"ANALYZE TABLE $tableName PARTITION (DS='2010-01-01') COMPUTE
STATISTICS")
- }.getMessage
- assert(message.contains(
- "DS is not a valid partition column in table " +
- s"`$SESSION_CATALOG_NAME`.`default`.`$tableName`"))
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ANALYZE TABLE $tableName PARTITION (DS='2010-01-01') COMPUTE
STATISTICS")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`DS`",
+ "tableName" -> s"`$SESSION_CATALOG_NAME`.`default`.`$tableName`")
+ )
}
}
}
@@ -692,16 +695,26 @@ class StatisticsSuite extends
StatisticsCollectionTestBase with TestHiveSingleto
sql(s"INSERT INTO TABLE $tableName PARTITION (ds='2010-01-01') SELECT *
FROM src")
- assertAnalysisException(
- s"ANALYZE TABLE $tableName PARTITION (hour=20) COMPUTE STATISTICS",
- "hour is not a valid partition column in table " +
-
s"`$SESSION_CATALOG_NAME`.`default`.`${tableName.toLowerCase(Locale.ROOT)}`"
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ANALYZE TABLE $tableName PARTITION (hour=20) COMPUTE
STATISTICS")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`hour`",
+ "tableName" ->
+
s"`$SESSION_CATALOG_NAME`.`default`.`${tableName.toLowerCase(Locale.ROOT)}`")
)
- assertAnalysisException(
- s"ANALYZE TABLE $tableName PARTITION (hour) COMPUTE STATISTICS",
- "hour is not a valid partition column in table " +
-
s"`$SESSION_CATALOG_NAME`.`default`.`${tableName.toLowerCase(Locale.ROOT)}`"
+ checkError(
+ exception = intercept[AnalysisException] {
+ sql(s"ANALYZE TABLE $tableName PARTITION (hour) COMPUTE STATISTICS")
+ },
+ condition = "PARTITIONS_NOT_FOUND",
+ parameters = Map(
+ "partitionList" -> "`hour`",
+ "tableName" ->
+
s"`$SESSION_CATALOG_NAME`.`default`.`${tableName.toLowerCase(Locale.ROOT)}`")
)
intercept[NoSuchPartitionException] {
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 4efc159a3ed4..880d7bdc8224 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -676,10 +676,10 @@ class HiveDDLSuite
exception = intercept[AnalysisException] {
sql(s"ALTER TABLE $externalTab DROP PARTITION (ds='2008-04-09',
unknownCol='12')")
},
- condition = "_LEGACY_ERROR_TEMP_1231",
+ condition = "PARTITIONS_NOT_FOUND",
parameters = Map(
- "key" -> "unknownCol",
- "tblName" ->
s"`$SESSION_CATALOG_NAME`.`default`.`exttable_with_partitions`")
+ "partitionList" -> "`unknownCol`",
+ "tableName" ->
s"`$SESSION_CATALOG_NAME`.`default`.`exttable_with_partitions`")
)
sql(
diff --git
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala
index 9b50e8f05fca..de6af30e663d 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala
+++
b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowTablesSuite.scala
@@ -39,8 +39,8 @@ class ShowTablesSuite extends v1.ShowTablesSuiteBase with
CommandSuiteBase {
catalog: String,
namespace: String,
table: String): (String, Map[String, String]) = {
- ("_LEGACY_ERROR_TEMP_1231",
- Map("key" -> "id", "tblName" -> s"`$catalog`.`$namespace`.`$table`"))
+ ("PARTITIONS_NOT_FOUND",
+ Map("partitionList" -> "`id`", "tableName" ->
s"`$catalog`.`$namespace`.`$table`"))
}
protected override def extendedPartExpectedResult: String =
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]