maropu commented on a change in pull request #28593:
URL: https://github.com/apache/spark/pull/28593#discussion_r430744366
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
+          s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}," +
+            s",you can enable the casting by setting" +
+            s"spark.sql.legacy.allowCastNumericToTimestamp =true;" +
Review comment:
`by setting spark.sql.legacy.allowCastNumericToTimestamp =true;` -> `by setting this value to true.`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -59,7 +59,8 @@ object Cast {
case (StringType, TimestampType) => true
case (BooleanType, TimestampType) => true
case (DateType, TimestampType) => true
- case (_: NumericType, TimestampType) => true
+ case (_: NumericType, TimestampType) =>
Review comment:
nit: ` case (_: NumericType, TimestampType) => SQLConf.get.allowCastNumericToTimestamp`?
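For reference, a minimal sketch of what the `canCast` rule could look like with this suggestion applied; the `SQLConf.get.allowCastNumericToTimestamp` accessor is taken from the comment above and is an assumption, not code that already exists in this PR:

```scala
// Inside object Cast#canCast (requires import org.apache.spark.sql.internal.SQLConf).
case (StringType, TimestampType) => true
case (BooleanType, TimestampType) => true
case (DateType, TimestampType) => true
// Gate the numeric -> timestamp rule on the proposed legacy flag instead of
// unconditionally returning true.
case (_: NumericType, TimestampType) =>
  SQLConf.get.allowCastNumericToTimestamp
```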
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
+          s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}," +
+            s",you can enable the casting by setting" +
Review comment:
`,you` -> `, you`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
+          s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}," +
+            s",you can enable the casting by setting" +
Review comment:
And, please remove the unnecessary `s` at the head.
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
+          s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}," +
+            s",you can enable the casting by setting" +
+            s"spark.sql.legacy.allowCastNumericToTimestamp =true;" +
+            s"but we strongly recommand using function" +
+            s"TIMESTAMP_SECONDS/TIMESTAMP_MILLIS/TIMESTAMP_MICROS instand"
Review comment:
`instand` -> `instead`
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
+          s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}," +
+            s",you can enable the casting by setting" +
+            s"spark.sql.legacy.allowCastNumericToTimestamp =true;" +
Review comment:
`spark.sql.legacy.allowCastNumericToTimestamp` ->
`${SQLConf.LEGACY_AllOW_CAST_NUMERIC_TO_TIMESTAMP.key}`
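Putting the message-related nits above together (space after the comma, no redundant `s` on continuation lines without interpolation, `instead`, and the config referenced through its SQLConf constant), the failure branch could read roughly as below; the constant name `LEGACY_ALLOW_CAST_NUMERIC_TO_TIMESTAMP` is assumed from the suggestion above, not taken from merged code:

```scala
// Sketch of the failure branch in CastBase.checkInputDataTypes with the review nits applied
// (requires import org.apache.spark.sql.internal.SQLConf).
TypeCheckResult.TypeCheckFailure(
  if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
    s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}, " +
      "you can enable the casting by setting " +
      s"${SQLConf.LEGACY_ALLOW_CAST_NUMERIC_TO_TIMESTAMP.key} to true, " +
      "but we strongly recommend using functions " +
      "TIMESTAMP_SECONDS/TIMESTAMP_MILLIS/TIMESTAMP_MICROS instead"
  } else {
    s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}"
  })
```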
##########
File path:
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
##########
@@ -266,7 +267,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       TypeCheckResult.TypeCheckSuccess
     } else {
       TypeCheckResult.TypeCheckFailure(
-        s"cannot cast ${child.dataType.catalogString} to ${dataType.catalogString}")
+        if (child.dataType.isInstanceOf[NumericType] && dataType.isInstanceOf[TimestampType]) {
Review comment:
btw, do we need this check here? I think it's okay to just update our migration guide.
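For context, a legacy flag like the one discussed in this thread is normally declared in org.apache.spark.sql.internal.SQLConf along the lines below; the exact constant name, doc text, and default are assumptions for illustration, not taken from this PR:

```scala
// Sketch only: the constant lives in object SQLConf, the accessor in class SQLConf.
val LEGACY_ALLOW_CAST_NUMERIC_TO_TIMESTAMP =
  buildConf("spark.sql.legacy.allowCastNumericToTimestamp")
    .internal()
    .doc("When true, casting NumericType to TimestampType is allowed. When false, " +
      "such casts fail analysis and TIMESTAMP_SECONDS/TIMESTAMP_MILLIS/TIMESTAMP_MICROS " +
      "should be used instead.")
    .booleanConf
    .createWithDefault(true)

// Accessor on the SQLConf class, matching the SQLConf.get.allowCastNumericToTimestamp
// usage suggested earlier in this thread.
def allowCastNumericToTimestamp: Boolean = getConf(LEGACY_ALLOW_CAST_NUMERIC_TO_TIMESTAMP)
```

If the extra error-message branch is dropped as suggested, the migration guide entry for this flag would be the main place users learn about the behavior change.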
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]