This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f541301b768 [SPARK-42844][SQL] Update the error class 
`_LEGACY_ERROR_TEMP_2008` to `INVALID_URL`
f541301b768 is described below

commit f541301b7680d96611796d92943d4ec72c71ec0d
Author: Liang Yan <ckgppl_...@sina.cn>
AuthorDate: Thu Apr 6 10:10:34 2023 +0300

    [SPARK-42844][SQL] Update the error class `_LEGACY_ERROR_TEMP_2008` to 
`INVALID_URL`
    
    ### What changes were proposed in this pull request?
    Update the error_class _LEGACY_ERROR_TEMP_2008 to INVALID_URL.
    
    ### Why are the changes needed?
    
    Fix jira issue SPARK-42844. The original name was just a number; update it to a 
meaningful name.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Add a test case in UrlFunctionsSuite to catch the error using sql command.
    
    Closes #40657 from liang3zy22/spark42844.
    
    Authored-by: Liang Yan <ckgppl_...@sina.cn>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 core/src/main/resources/error/error-classes.json          | 10 +++++-----
 .../apache/spark/sql/errors/QueryExecutionErrors.scala    |  2 +-
 .../sql/catalyst/expressions/StringExpressionsSuite.scala | 10 ++--------
 .../scala/org/apache/spark/sql/UrlFunctionsSuite.scala    | 15 +++++++++++++++
 4 files changed, 23 insertions(+), 14 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json 
b/core/src/main/resources/error/error-classes.json
index d43996f6a49..79c6f3e6d82 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1068,6 +1068,11 @@
     ],
     "sqlState" : "42604"
   },
+  "INVALID_URL" : {
+    "message" : [
+      "The url is invalid: <url>. If necessary set <ansiConfig> to \"false\" 
to bypass this error."
+    ]
+  },
   "INVALID_WHERE_CONDITION" : {
     "message" : [
       "The WHERE condition <condition> contains invalid expressions: 
<expressionList>.",
@@ -3721,11 +3726,6 @@
       "Regex group count is <groupCount>, but the specified group index is 
<groupIndex>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2008" : {
-    "message" : [
-      "Find an invalid url string <url>. If necessary set <ansiConfig> to 
false to bypass this error."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2010" : {
     "message" : [
       "Window Functions do not support merging."
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index ad0796e4eb1..7ae9d55f96c 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -369,7 +369,7 @@ private[sql] object QueryExecutionErrors extends 
QueryErrorsBase {
 
   def invalidUrlError(url: UTF8String, e: URISyntaxException): 
SparkIllegalArgumentException = {
     new SparkIllegalArgumentException(
-      errorClass = "_LEGACY_ERROR_TEMP_2008",
+      errorClass = "INVALID_URL",
       messageParameters = Map(
         "url" -> url.toString,
         "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)),
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index 017f4483e88..9c97508b7a8 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -1832,16 +1832,10 @@ class StringExpressionsSuite extends SparkFunSuite with 
ExpressionEvalHelper {
   }
 
   test("SPARK-33468: ParseUrl in ANSI mode should fail if input string is not 
a valid url") {
-    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
-      val msg = intercept[IllegalArgumentException] {
-        evaluateWithoutCodegen(
-          ParseUrl(Seq("https://a.b.c/index.php?params1=a|b&params2=x", 
"HOST")))
-      }.getMessage
-      assert(msg.contains("Find an invalid url string"))
-    }
+    val url = "https://a.b.c/index.php?params1=a|b&params2=x"
     withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
       checkEvaluation(
-        ParseUrl(Seq("https://a.b.c/index.php?params1=a|b&params2=x", 
"HOST")), null)
+        ParseUrl(Seq(url, "HOST")), null)
     }
   }
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/UrlFunctionsSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/UrlFunctionsSuite.scala
index 85f0d70df7b..50b64c55552 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UrlFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UrlFunctionsSuite.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql
 
+import org.apache.spark.{SparkException, SparkThrowable}
+import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLConf
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 
@@ -67,6 +69,19 @@ class UrlFunctionsSuite extends QueryTest with 
SharedSparkSession {
         "inva lid://user:pass@host/file;param?query;p2",
         Row(null, null, null, null, null, null, null, null, null))
     }
+
+    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
+      val url = "inva lid://user:pass@host/file;param?query;p2"
+      checkError(
+        exception = intercept[SparkException] {
+          sql(s"SELECT parse_url('$url', 'HOST')").collect()
+        }.getCause.asInstanceOf[SparkThrowable],
+        errorClass = "INVALID_URL",
+        parameters = Map(
+          "url" -> url,
+          "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)
+        ))
+    }
   }
 
   test("url encode/decode function") {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to