This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new a053b40ac0e9 [SPARK-47099][SQL][FOLLOW-UP] Uses ordinalNumber in
UNEXPECTED_INPUT_TYPE
a053b40ac0e9 is described below
commit a053b40ac0e95b0eace2cb4da5e6e79e7019793b
Author: Hyukjin Kwon <[email protected]>
AuthorDate: Fri Feb 23 14:12:10 2024 +0900
[SPARK-47099][SQL][FOLLOW-UP] Uses ordinalNumber in UNEXPECTED_INPUT_TYPE
### What changes were proposed in this pull request?
This PR is a followup of https://github.com/apache/spark/pull/45177 that
fixes some leftovers missed.
### Why are the changes needed?
For consistency. Also, I think this fixes the Maven build failure:
https://github.com/apache/spark/actions/runs/8005710953/job/21865798408
### Does this PR introduce _any_ user-facing change?
Yes, the value of 'paramIndex' for the error class `UNEXPECTED_INPUT_TYPE`
is uniformly set by `ordinalNumber`.
### How was this patch tested?
CI in this PR.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #45225 from HyukjinKwon/SPARK-47099-followup.
Authored-by: Hyukjin Kwon <[email protected]>
Signed-off-by: Hyukjin Kwon <[email protected]>
---
.../scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala | 2 +-
sql/core/src/test/resources/sql-tests/analyzer-results/mode.sql.out | 2 +-
.../sql-tests/analyzer-results/table-valued-functions.sql.out | 4 ++--
sql/core/src/test/resources/sql-tests/results/mode.sql.out | 2 +-
.../test/resources/sql-tests/results/table-valued-functions.sql.out | 4 ++--
sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala | 2 +-
6 files changed, 8 insertions(+), 8 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index e96474862b1d..3ab6c22e5fda 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1966,7 +1966,7 @@ private[sql] object QueryCompilationErrors extends
QueryErrorsBase with Compilat
new AnalysisException(
errorClass = "UNEXPECTED_INPUT_TYPE",
messageParameters = Map(
- "paramIndex" -> paramIndex.toString,
+ "paramIndex" -> ordinalNumber(paramIndex - 1),
"functionName" -> toSQLId(functionName),
"requiredType" -> toSQLType(dataType),
"inputSql" -> toSQLExpr(expression),
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/mode.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/mode.sql.out
index a0a0c81ef027..2508b9b5fdd9 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/mode.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/mode.sql.out
@@ -124,7 +124,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`mode`",
"inputSql" : "\"true\"",
"inputType" : "\"STRING\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BOOLEAN\""
},
"queryContext" : [ {
diff --git
a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
index 765de505d9a2..c8698f7c7cd7 100644
---
a/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
+++
b/sql/core/src/test/resources/sql-tests/analyzer-results/table-valued-functions.sql.out
@@ -81,7 +81,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`range`",
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
@@ -105,7 +105,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`range`",
"inputSql" : "\"array(1, 2, 3)\"",
"inputType" : "\"ARRAY<INT>\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/mode.sql.out
b/sql/core/src/test/resources/sql-tests/results/mode.sql.out
index 6ae7b2d29e9d..9eac2c40e3ee 100644
--- a/sql/core/src/test/resources/sql-tests/results/mode.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/mode.sql.out
@@ -103,7 +103,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`mode`",
"inputSql" : "\"true\"",
"inputType" : "\"STRING\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BOOLEAN\""
},
"queryContext" : [ {
diff --git
a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
index 55f96ce95416..768f0e8c010e 100644
---
a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
@@ -112,7 +112,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`range`",
"inputSql" : "\"NULL\"",
"inputType" : "\"VOID\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
@@ -138,7 +138,7 @@ org.apache.spark.sql.AnalysisException
"functionName" : "`range`",
"inputSql" : "\"array(1, 2, 3)\"",
"inputType" : "\"ARRAY<INT>\"",
- "paramIndex" : "2",
+ "paramIndex" : "second",
"requiredType" : "\"BIGINT\""
},
"queryContext" : [ {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
index 6daf89173f40..465afc7e2006 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CollationSuite.scala
@@ -80,7 +80,7 @@ class CollationSuite extends QueryTest with
SharedSparkSession {
sqlState = "42K09",
Map(
"functionName" -> "`collate`",
- "paramIndex" -> "1",
+ "paramIndex" -> "first",
"inputSql" -> "\"123\"",
"inputType" -> "\"INT\"",
"requiredType" -> "\"STRING\""),
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]