This is an automated email from the ASF dual-hosted git repository.
sunchao pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.2 by this push:
new 49b830c [SPARK-37633][SQL] Unwrap cast should skip if downcast failed
with an…
49b830c is described below
commit 49b830c866802c3c871bc50dce4139ed5610edc5
Author: manuzhang <[email protected]>
AuthorDate: Wed Dec 15 11:32:06 2021 -0800
[SPARK-37633][SQL] Unwrap cast should skip if downcast failed with an…
### What changes were proposed in this pull request?
Use non-ANSI cast when applying `UnwrapCastInBinaryComparison` rule.
### Why are the changes needed?
Since `UnwrapCastInBinaryComparison` is an optimizer rule, it should not
fail the application when casting.
### Does this PR introduce _any_ user-facing change?
With `spark.sql.ansi.enabled=true`, the application won't fail if a downcast fails
when applying the `UnwrapCastInBinaryComparison` rule.
### How was this patch tested?
Update UT.
Closes #34888 from manuzhang/spark-37633.
Authored-by: manuzhang <[email protected]>
Signed-off-by: Chao Sun <[email protected]>
(cherry picked from commit 71d4b277f742ba0d487a26082f2ea4b342c498cd)
Signed-off-by: Chao Sun <[email protected]>
---
.../sql/catalyst/optimizer/UnwrapCastInBinaryComparison.scala | 2 +-
.../catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala | 9 +++++++--
2 files changed, 8 insertions(+), 3 deletions(-)
diff --git
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparison.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparison.scala
index 08c4cbf..6d7b6e5 100644
---
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparison.scala
+++
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparison.scala
@@ -306,7 +306,7 @@ object UnwrapCastInBinaryComparison extends
Rule[LogicalPlan] {
// decimal type), or that the literal `value` is within range `(min,
max)`. For these, we
// optimize by moving the cast to the literal side.
- val newValue = Cast(Literal(value), fromType).eval()
+ val newValue = Cast(Literal(value), fromType, ansiEnabled = false).eval()
if (newValue == null) {
// This means the cast failed, for instance, due to the value is not
representable in the
// narrower type. In this case we simply return the original expression.
diff --git
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala
index 31f62cf..ac3dca6 100644
---
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala
+++
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala
@@ -28,6 +28,7 @@ import
org.apache.spark.sql.catalyst.optimizer.UnwrapCastInBinaryComparison._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.RuleExecutor
+import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
class UnwrapCastInBinaryComparisonSuite extends PlanTest with
ExpressionEvalHelper {
@@ -202,8 +203,12 @@ class UnwrapCastInBinaryComparisonSuite extends PlanTest
with ExpressionEvalHelp
}
test("unwrap casts should skip if downcast failed") {
- val decimalValue = decimal2(123456.1234)
- assertEquivalent(castDecimal2(f3) === decimalValue, castDecimal2(f3) ===
decimalValue)
+ Seq("true", "false").foreach { ansiEnabled =>
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> ansiEnabled) {
+ val decimalValue = decimal2(123456.1234)
+ assertEquivalent(castDecimal2(f3) === decimalValue, castDecimal2(f3)
=== decimalValue)
+ }
+ }
}
test("unwrap cast should skip if cannot coerce type") {
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]