This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 56593f4fb77b [SPARK-49637][SQL] Changed error message for
INVALID_FRACTION_OF_SECOND
56593f4fb77b is described below
commit 56593f4fb77ba90f2c6df1450adf91bb9cf667af
Author: Marko Nikacevic <[email protected]>
AuthorDate: Tue Oct 29 14:37:24 2024 +0100
[SPARK-49637][SQL] Changed error message for INVALID_FRACTION_OF_SECOND
### What changes were proposed in this pull request?
In this PR, the error message for INVALID_FRACTION_OF_SECOND is changed,
such that it no longer suggests turning off the ANSI flag. Now, the error suggests
using try_ variants of functions for making timestamps. This PR is done in
conjunction with https://github.com/apache/spark/pull/48624, where the try_
variants of these functions are implemented.
### Why are the changes needed?
These changes are needed as part of the effort to move to ANSI by default.
### Does this PR introduce _any_ user-facing change?
Yes, the error message is changed.
### How was this patch tested?
Tests not needed.
### Was this patch authored or co-authored using generative AI tooling?
No
Closes #48656 from markonik-db/SPARK-49637-INVALID-FRACTION-OF-SECOND.
Authored-by: Marko Nikacevic <[email protected]>
Signed-off-by: Max Gekk <[email protected]>
---
common/utils/src/main/resources/error/error-conditions.json | 3 ++-
.../apache/spark/sql/catalyst/expressions/datetimeExpressions.scala | 4 ++--
.../scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 4 ++--
.../spark/sql/catalyst/expressions/DateExpressionsSuite.scala | 2 +-
.../src/test/resources/sql-tests/results/ansi/timestamp.sql.out | 2 +-
.../resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out | 2 +-
.../org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 6 ++++--
7 files changed, 13 insertions(+), 10 deletions(-)
diff --git a/common/utils/src/main/resources/error/error-conditions.json
b/common/utils/src/main/resources/error/error-conditions.json
index 68ccf7244cc3..a09a05de1a45 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -2413,7 +2413,8 @@
},
"INVALID_FRACTION_OF_SECOND" : {
"message" : [
- "The fraction of sec must be zero. Valid range is [0, 60]. If necessary
set <ansiConfig> to \"false\" to bypass this error."
+ "Valid range for seconds is [0, 60] (inclusive), but the provided value
is <secAndMicros>. To avoid this error, use `try_make_timestamp`, which returns
NULL on error.",
+ "If you do not want to use the session default timestamp version of this
function, use `try_make_timestamp_ntz` or `try_make_timestamp_ltz`."
],
"sqlState" : "22023"
},
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 7f615dbc245b..dd20418496ca 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -2808,7 +2808,7 @@ case class MakeTimestamp(
// This case of sec = 60 and nanos = 0 is supported for
compatibility with PostgreSQL
LocalDateTime.of(year, month, day, hour, min, 0, 0).plusMinutes(1)
} else {
- throw QueryExecutionErrors.invalidFractionOfSecondError()
+ throw QueryExecutionErrors.invalidFractionOfSecondError(secAndMicros)
}
} else {
LocalDateTime.of(year, month, day, hour, min, seconds, nanos)
@@ -2879,7 +2879,7 @@ case class MakeTimestamp(
ldt = java.time.LocalDateTime.of(
$year, $month, $day, $hour, $min, 0, 0).plusMinutes(1);
} else {
- throw QueryExecutionErrors.invalidFractionOfSecondError();
+ throw
QueryExecutionErrors.invalidFractionOfSecondError($secAndNanos);
}
} else {
ldt = java.time.LocalDateTime.of($year, $month, $day, $hour, $min,
seconds, nanos);
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 0aed8e604bd9..2cc223ba69fa 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -257,11 +257,11 @@ private[sql] object QueryExecutionErrors extends
QueryErrorsBase with ExecutionE
summary = "")
}
- def invalidFractionOfSecondError(): DateTimeException = {
+ def invalidFractionOfSecondError(secAndMicros: Decimal): DateTimeException =
{
new SparkDateTimeException(
errorClass = "INVALID_FRACTION_OF_SECOND",
messageParameters = Map(
- "ansiConfig" -> toSQLConf(SQLConf.ANSI_ENABLED.key)
+ "secAndMicros" -> s"$secAndMicros"
),
context = Array.empty,
summary = "")
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 21ae35146282..05d68504a727 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -1202,7 +1202,7 @@ class DateExpressionsSuite extends SparkFunSuite with
ExpressionEvalHelper {
Literal(23), Literal(59), Literal(Decimal(BigDecimal(60.0), 16,
6)))
if (ansi) {
checkExceptionInExpression[DateTimeException](makeTimestampExpr.copy(sec =
Literal(
- Decimal(BigDecimal(60.5), 16, 6))), EmptyRow, "The fraction of
sec must be zero")
+ Decimal(BigDecimal(60.5), 16, 6))), EmptyRow, "Valid range for
seconds is [0, 60]")
} else {
checkEvaluation(makeTimestampExpr, expectedAnswer("2019-07-01
00:00:00"))
}
diff --git
a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index d7a58e321b0f..c64bd2ff57e1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -126,7 +126,7 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "INVALID_FRACTION_OF_SECOND",
"sqlState" : "22023",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\""
+ "secAndMicros" : "60.007000"
}
}
diff --git
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index cd94674d2bf2..482a1efb6b09 100644
---
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -126,7 +126,7 @@ org.apache.spark.SparkDateTimeException
"errorClass" : "INVALID_FRACTION_OF_SECOND",
"sqlState" : "22023",
"messageParameters" : {
- "ansiConfig" : "\"spark.sql.ansi.enabled\""
+ "secAndMicros" : "60.007000"
}
}
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 2e0983fe0319..3e896ae69b68 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -97,11 +97,13 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest
test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
checkError(
exception = intercept[SparkDateTimeException] {
- sql("select make_timestamp(2012, 11, 30, 9, 19,
60.66666666)").collect()
+ sql("select make_timestamp(2012, 11, 30, 9, 19, 60.1)").collect()
},
condition = "INVALID_FRACTION_OF_SECOND",
sqlState = "22023",
- parameters = Map("ansiConfig" -> ansiConf))
+ parameters = Map(
+ "secAndMicros" -> "60.100000"
+ ))
}
test("NUMERIC_VALUE_OUT_OF_RANGE: cast string to decimal") {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]