viirya commented on code in PR #52216:
URL: https://github.com/apache/spark/pull/52216#discussion_r2319334514
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala:
##########
@@ -507,16 +490,19 @@ case class GetMapValue(child: Expression, key: Expression)

   private[catalyst] def keyType = child.dataType.asInstanceOf[MapType].keyType

-  override def checkInputDataTypes(): TypeCheckResult = {
-    super.checkInputDataTypes() match {
-      case f if f.isFailure => f
-      case TypeCheckResult.TypeCheckSuccess =>
-        TypeUtils.checkForOrderingExpr(keyType, prettyName)
-    }
+  override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
+    case _: MapType =>
+      super.checkInputDataTypes() match {
+        case f if f.isFailure => f
+        case TypeCheckResult.TypeCheckSuccess =>
+          TypeUtils.checkForOrderingExpr(keyType, prettyName)
+      }
+    // This should never happen, unless we hit a bug.
+    case other => TypeCheckResult.TypeCheckFailure(
+      "GetMapValue.child must be map type, but got " + other)
   }

-  // We have done type checking for child in `ExtractValue`, so only need to check the `key`.
-  override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType, keyType)
+  override def inputTypes: Seq[AbstractDataType] = Seq(MapType, keyType)

Review Comment:
   Hmm, if the child's data type can be something other than `MapType`, then `keyType` cannot directly call `asInstanceOf` to cast it to `MapType`, because `inputTypes` can be called before the child's data type has finished type checking.

--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
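
To make the ordering concern concrete, here is a minimal, self-contained sketch with stand-in types (these are not Spark's real `Expression`/`DataType` classes, and the guarded `keyTypeSafe` is just one hypothetical way to avoid the unconditional cast):

```scala
// Sketch only: shows why an unconditional asInstanceOf in keyType is risky
// when inputTypes can be evaluated before the child has been type checked.
object GetMapValueSketch {

  sealed trait DataType
  case object IntegerType extends DataType
  final case class MapType(keyType: DataType, valueType: DataType) extends DataType

  final case class Expr(dataType: DataType)

  final class GetMapValue(child: Expr, key: Expr) {
    // Mirrors the current code: throws when child.dataType is not a MapType.
    def keyTypeUnsafe: DataType =
      child.dataType.asInstanceOf[MapType].keyType

    // Hypothetical guard: only derive the key type once the child is known
    // to be a map, so inputTypes can be evaluated before type checking runs.
    def keyTypeSafe: Option[DataType] = child.dataType match {
      case m: MapType => Some(m.keyType)
      case _          => None
    }

    // Never throws, even for a non-map child.
    def inputTypes: Seq[DataType] = Seq(child.dataType) ++ keyTypeSafe
  }

  def main(args: Array[String]): Unit = {
    val notAMap = new GetMapValue(Expr(IntegerType), Expr(IntegerType))
    println(notAMap.inputTypes)        // Seq(IntegerType): no cast, no crash
    try println(notAMap.keyTypeUnsafe) // the unconditional cast fails here
    catch { case e: ClassCastException => println(s"unsafe cast failed: $e") }
  }
}
```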