This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-4.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push:
new 85024b56b882 [SPARK-49639][SQL] Remove the ANSI config suggestion in
INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION
85024b56b882 is described below
commit 85024b56b882fd793736808c791ac8be894d3e94
Author: Sakthi Vel <[email protected]>
AuthorDate: Sat Jan 25 12:01:27 2025 +0300
[SPARK-49639][SQL] Remove the ANSI config suggestion in
INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION
### What changes were proposed in this pull request?
Removal of ANSI turn off suggestion for
INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION error
### Why are the changes needed?
Now that in Spark 4.0.0 we have moved to ANSI mode on by default, we want
to keep suggestions of this kind to a minimum.
### Does this PR introduce _any_ user-facing change?
Yes, error message has changed.
### How was this patch tested?
Existing tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #49655 from the-sakthi/SPARK-49639.
Authored-by: Sakthi Vel <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
(cherry picked from commit ff6b1a9fb705b867a0547fed272b2597ce25b46a)
Signed-off-by: Max Gekk <[email protected]>
---
common/utils/src/main/resources/error/error-conditions.json | 2 +-
.../main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 4 +---
.../apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala | 3 +--
.../scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala | 3 +--
4 files changed, 4 insertions(+), 8 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 302a0275491a..5b08a31a05e4 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2743,7 +2743,7 @@
},
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION" : {
"message" : [
- "Cannot add an interval to a date because its microseconds part is not
0. If necessary set <ansiConfig> to \"false\" to bypass this error."
+ "Cannot add an interval to a date because its microseconds part is not
0. To resolve this, cast the input date to a timestamp, which supports the
addition of intervals with non-zero microseconds."
],
"sqlState" : "22006"
},
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 69518b548653..f9f9b31a25aa 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -287,9 +287,7 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
}
def invalidIntervalWithMicrosecondsAdditionError():
SparkIllegalArgumentException = {
- new SparkIllegalArgumentException(
- errorClass = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
- messageParameters = Map("ansiConfig" ->
toSQLConf(SQLConf.ANSI_ENABLED.key)))
+ new SparkIllegalArgumentException(errorClass =
"INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION")
}
def overflowInSumOfDecimalError(
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 09650a0dcc02..2ddddad7a294 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -436,8 +436,7 @@ class DateExpressionsSuite extends SparkFunSuite with
ExpressionEvalHelper {
withSQLConf((SQLConf.ANSI_ENABLED.key, "true")) {
checkErrorInExpression[SparkIllegalArgumentException](
DateAddInterval(Literal(d), Literal(new CalendarInterval(1, 1, 25 *
MICROS_PER_HOUR))),
- "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
- Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+ "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION")
}
withSQLConf((SQLConf.ANSI_ENABLED.key, "false")) {
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 790c834d83e9..c253272e2bbb 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -542,8 +542,7 @@ class DateTimeUtilsSuite extends SparkFunSuite with
Matchers with SQLHelper {
checkError(
exception = intercept[SparkIllegalArgumentException](
dateAddInterval(input, new CalendarInterval(36, 47, 1))),
- condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION",
- parameters = Map("ansiConfig" -> "\"spark.sql.ansi.enabled\""))
+ condition = "INVALID_INTERVAL_WITH_MICROSECONDS_ADDITION")
}
test("timestamp add interval") {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]