cloud-fan commented on code in PR #52016:
URL: https://github.com/apache/spark/pull/52016#discussion_r2416928422


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TableOutputResolver.scala:
##########
@@ -505,60 +428,47 @@ object TableOutputResolver extends SQLConfHelper with Logging {
       expectedType: MapType,
       byName: Boolean,
       conf: SQLConf,
-      addError: String => Unit,
-      colPath: Seq[String]): Option[NamedExpression] = {
-    val nullCheckedInput = checkNullability(input, expected, conf, colPath)
+      colPath: Seq[String]): Expression = {
+    val nullCheckedInput = withNullabilityChecked(input, expected, conf, colPath)
 
     val keyParam = NamedLambdaVariable("key", inputType.keyType, nullable = false)
     val fakeKeyAttr = AttributeReference("key", expectedType.keyType, nullable = false)()
-    val resKey = if (byName) {
-      reorderColumnsByName(tableName, Seq(keyParam), Seq(fakeKeyAttr), conf, addError, colPath)
-    } else {
-      resolveColumnsByPosition(tableName, Seq(keyParam), Seq(fakeKeyAttr), conf, addError, colPath)
-    }
+    val resKey = resolveColumns(tableName, Seq(keyParam), Seq(fakeKeyAttr),
+      conf, byName, fillDefaultValue = false, colPath)
 
     val valueParam =
       NamedLambdaVariable("value", inputType.valueType, inputType.valueContainsNull)
     val fakeValueAttr =
       AttributeReference("value", expectedType.valueType, expectedType.valueContainsNull)()
-    val resValue = if (byName) {
-      reorderColumnsByName(tableName, Seq(valueParam), Seq(fakeValueAttr), conf, addError, colPath)
-    } else {
-      resolveColumnsByPosition(
-        tableName, Seq(valueParam), Seq(fakeValueAttr), conf, addError, colPath)
-    }
+    val resValue = resolveColumns(tableName, Seq(valueParam), Seq(fakeValueAttr),
+      conf, byName, fillDefaultValue = false, colPath)
 
-    if (resKey.length == 1 && resValue.length == 1) {
-      // If the key and value expressions have not changed, we just check original map field.
-      // Otherwise, we construct a new map by adding transformations to the keys and values.
-      val casted =
-        if (resKey.head == keyParam && resValue.head == valueParam) {
-          nullCheckedInput
-        } else {
-          val newKeys = if (resKey.head != keyParam) {
-            val keyFunc = LambdaFunction(resKey.head, Seq(keyParam))
-            ArrayTransform(MapKeys(nullCheckedInput), keyFunc)
-          } else {
-            MapKeys(nullCheckedInput)
-          }
-          val newValues = if (resValue.head != valueParam) {
-            val valueFunc = LambdaFunction(resValue.head, Seq(valueParam))
-            ArrayTransform(MapValues(nullCheckedInput), valueFunc)
-          } else {
-            MapValues(nullCheckedInput)
-          }
-          MapFromArrays(newKeys, newValues)
-        }
-      Some(applyColumnMetadata(casted, expected))
+    // If the key and value expressions have not changed, we just check original map field.
+    // Otherwise, we construct a new map by adding transformations to the keys and values.
+    if (resKey.head == keyParam && resValue.head == valueParam) {
+      nullCheckedInput
     } else {
-      None
+      // todo: can't we use a cast or cast-like function instead of the transforms?

Review Comment:
   ditto as https://github.com/apache/spark/pull/52016/files#r2416927216



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to