cloud-fan commented on code in PR #52016:
URL: https://github.com/apache/spark/pull/52016#discussion_r2416936622
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TableOutputResolver.scala:
##########
@@ -591,83 +501,49 @@ object TableOutputResolver extends SQLConfHelper with
Logging {
private def canCauseCastOverflow(cast: Cast): Boolean = {
containsIntegralOrDecimalType(cast.dataType) &&
- !Cast.canUpCast(cast.child.dataType, cast.dataType)
+ !Cast.canUpCast(cast.child.dataType, cast.dataType)
}
- private def checkField(
- tableName: String,
- tableAttr: Attribute,
- queryExpr: NamedExpression,
- byName: Boolean,
- conf: SQLConf,
- addError: String => Unit,
- colPath: Seq[String]): Option[NamedExpression] = {
-
- val attrTypeHasCharVarchar =
CharVarcharUtils.hasCharVarchar(tableAttr.dataType)
- val attrTypeWithoutCharVarchar = if (attrTypeHasCharVarchar) {
- CharVarcharUtils.replaceCharVarcharWithString(tableAttr.dataType)
+ private def unwrapUDT(expr: Expression): Expression = {
+ if (!expr.dataType.existsRecursively(_.isInstanceOf[UserDefinedType[_]])) {
+ // todo: this is now N^2 in the type depth. We could instead check the
+ // return value is eq to expr.
Review Comment:
Unfortunately there is no better way. What do you mean by "check the return value is
eq to expr"?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]