Github user bersprockets commented on a diff in the pull request:
https://github.com/apache/spark/pull/21073#discussion_r182575704
--- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala ---
@@ -115,6 +116,62 @@ case class MapValues(child: Expression)
override def prettyName: String = "map_values"
}
+/**
+ * Returns the union of all the given maps.
+ */
+@ExpressionDescription(
+  usage = "_FUNC_(map, ...) - Returns the union of all the given maps",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(map(1, 'a', 2, 'b'), map(2, 'c', 3, 'd'));
+       [[1 -> "a"], [2 -> "c"], [3 -> "d"]]
+  """)
+case class MapConcat(children: Seq[Expression]) extends Expression
+ with CodegenFallback {
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    // this check currently does not allow valueContainsNull to vary,
+    // and unfortunately none of the MapType toString methods include
+    // valueContainsNull for the error message
+    if (children.exists(!_.dataType.isInstanceOf[MapType])) {
+      TypeCheckResult.TypeCheckFailure(
+        s"The given input of function $prettyName should all be of type map, " +
+          "but they are " + children.map(_.dataType.simpleString).mkString("[", ", ", "]"))
+    } else if (children.map(_.dataType).distinct.length > 1) {
+      TypeCheckResult.TypeCheckFailure(
+        s"The given input maps of function $prettyName should all be the same type, " +
+          "but they are " + children.map(_.dataType.simpleString).mkString("[", ", ", "]"))
+    } else {
+      TypeCheckResult.TypeCheckSuccess
+    }
+  }
+  override def dataType: MapType = {
+    children.headOption.map(_.dataType.asInstanceOf[MapType])
+      .getOrElse(MapType(keyType = StringType, valueType = StringType))
+  }
+
+  override def nullable: Boolean = false
--- End diff ---
@henryr Another quick test of Presto shows that if *any* input is NULL, the result is NULL:
<pre>
presto:default> SELECT map_concat(NULL, map(ARRAY[1,3], ARRAY[2,4]));
_col0
-------
NULL
(1 row)
</pre>
Looks like I need to check if any input is NULL.
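
Something along these lines might work (a minimal sketch only, not the final patch). It assumes the snippet lives inside the `MapConcat` class body, so `children` is in scope, and `unionMaps` is a hypothetical placeholder for the existing merge logic:

```scala
// Sketch: the expression can produce NULL whenever any input can be NULL.
override def nullable: Boolean = children.exists(_.nullable)

override def eval(input: InternalRow): Any = {
  // Evaluate every input first; if any map is NULL, the whole result is NULL,
  // matching the Presto behavior shown above.
  val maps = children.map(_.eval(input).asInstanceOf[MapData])
  if (maps.contains(null)) {
    null
  } else {
    unionMaps(maps) // hypothetical helper holding the existing merge logic
  }
}
```

Evaluating all children up front is the simplest way to mirror Presto's semantics; short-circuiting on the first NULL input would also be possible.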
---