This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f824d058b14 [SPARK-42330][SQL] Assign the name `RULE_ID_NOT_FOUND` to 
the error class `_LEGACY_ERROR_TEMP_2175`
f824d058b14 is described below

commit f824d058b14e3c58b1c90f64fefc45fac105c7dd
Author: Koray Beyaz <koraybeya...@gmail.com>
AuthorDate: Thu Aug 3 10:57:26 2023 +0500

    [SPARK-42330][SQL] Assign the name `RULE_ID_NOT_FOUND` to the error class 
`_LEGACY_ERROR_TEMP_2175`
    
    ### What changes were proposed in this pull request?
    
    - Rename _LEGACY_ERROR_TEMP_2175 to RULE_ID_NOT_FOUND
    
    - Add a test case for the error class.
    
    ### Why are the changes needed?
    
    We are migrating to error classes.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the error message will include the error class name
    
    ### How was this patch tested?
    
    `testOnly *RuleIdCollectionSuite` and GitHub Actions
    
    Closes #40991 from kori73/SPARK-42330.
    
    Lead-authored-by: Koray Beyaz <koraybeya...@gmail.com>
    Co-authored-by: Koray Beyaz <koray.beya...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 common/utils/src/main/resources/error/error-classes.json      | 11 ++++++-----
 docs/sql-error-conditions.md                                  |  6 ++++++
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala    |  5 ++---
 .../apache/spark/sql/errors/QueryExecutionErrorsSuite.scala   | 11 +++++++++++
 4 files changed, 25 insertions(+), 8 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json 
b/common/utils/src/main/resources/error/error-classes.json
index a9619b97bd9..20f2ab4eb24 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -2471,6 +2471,12 @@
     ],
     "sqlState" : "42883"
   },
+  "RULE_ID_NOT_FOUND" : {
+    "message" : [
+      "Not found an id for the rule name \"<ruleName>\". Please modify 
RuleIdCollection.scala if you are adding a new rule."
+    ],
+    "sqlState" : "22023"
+  },
   "SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION" : {
     "message" : [
       "The correlated scalar subquery '<sqlExpr>' is neither present in GROUP 
BY, nor in an aggregate function. Add it to GROUP BY using ordinal position or 
wrap it in `first()` (or `first_value`) if you don't care which value you get."
@@ -5489,11 +5495,6 @@
       "<plan>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2175" : {
-    "message" : [
-      "Rule id not found for <ruleName>. Please modify RuleIdCollection.scala 
if you are adding a new rule."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2176" : {
     "message" : [
       "Cannot create array with <numElements> elements of data due to 
exceeding the limit <maxRoundedArrayLength> elements for ArrayData. 
<additionalErrorMessage>"
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index 161f3bdbef1..5609d60f974 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -1586,6 +1586,12 @@ The function `<routineName>` cannot be found. Verify the 
spelling and correctnes
 If you did not qualify the name with a schema and catalog, verify the 
current_schema() output, or qualify the name with the correct schema and 
catalog.
 To tolerate the error on drop use DROP FUNCTION IF EXISTS.
 
+### RULE_ID_NOT_FOUND
+
+[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+Not found an id for the rule name "`<ruleName>`". Please modify 
RuleIdCollection.scala if you are adding a new rule.
+
 ### SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION
 
 SQLSTATE: none assigned
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 3622ffebb74..45b5d6b6692 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1584,9 +1584,8 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase with ExecutionE
 
   def ruleIdNotFoundForRuleError(ruleName: String): Throwable = {
     new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2175",
-      messageParameters = Map(
-        "ruleName" -> ruleName),
+      errorClass = "RULE_ID_NOT_FOUND",
+      messageParameters = Map("ruleName" -> ruleName),
       cause = null)
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index e70d04b7b5a..ae1c0a86a14 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -34,6 +34,7 @@ import org.apache.spark.sql.catalyst.expressions.{Grouping, 
Literal, RowNumber}
 import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode._
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext
 import org.apache.spark.sql.catalyst.expressions.objects.InitializeJavaBean
+import org.apache.spark.sql.catalyst.rules.RuleIdCollection
 import org.apache.spark.sql.catalyst.util.BadRecordException
 import org.apache.spark.sql.execution.datasources.jdbc.{DriverRegistry, 
JDBCOptions}
 import 
org.apache.spark.sql.execution.datasources.jdbc.connection.ConnectionProvider
@@ -499,6 +500,16 @@ class QueryExecutionErrorsSuite
     }
   }
 
+  test("SPARK-42330: rule id not found") {
+    checkError(
+      exception = intercept[SparkException] {
+          RuleIdCollection.getRuleId("incorrect")
+      },
+      errorClass = "RULE_ID_NOT_FOUND",
+      parameters = Map("ruleName" -> "incorrect")
+    )
+  }
+
   test("CANNOT_RESTORE_PERMISSIONS_FOR_PATH: can't set permission") {
       withTable("t") {
         withSQLConf(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to