Github user mn-mikke commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20938#discussion_r180070286
  
    --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala ---
    @@ -287,3 +289,165 @@ case class ArrayContains(left: Expression, right: Expression)
     
       override def prettyName: String = "array_contains"
     }
    +
    +/**
    + * Transforms an array of arrays into a single array.
    + */
    +@ExpressionDescription(
    +  usage = "_FUNC_(arrayOfArrays) - Transforms an array of arrays into a single array.",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(array(1, 2), array(3, 4)));
    +       [1,2,3,4]
    +  """)
    +case class Flatten(child: Expression) extends UnaryExpression {
    +
    +  override def nullable: Boolean = child.nullable || dataType.containsNull
    +
    +  override def dataType: ArrayType = {
    +    child
    +      .dataType.asInstanceOf[ArrayType]
    +      .elementType.asInstanceOf[ArrayType]
    +  }
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    if (
    +      ArrayType.acceptsType(child.dataType) &&
    +      ArrayType.acceptsType(child.dataType.asInstanceOf[ArrayType].elementType)
    +    ) {
    +      TypeCheckResult.TypeCheckSuccess
    +    } else {
    +      TypeCheckResult.TypeCheckFailure(
    +        s"The argument should be an array of arrays, " +
    +        s"but '${child.sql}' is of ${child.dataType.simpleString} type."
    +      )
    +    }
    +  }
    +
    +  override def nullSafeEval(array: Any): Any = {
    +    val elements = array.asInstanceOf[ArrayData].toObjectArray(dataType)
    +
    +    if (elements.contains(null)) {
    +      null
    --- End diff --
    
    Yes, you are right. The function also behaves the same way when codegen is applied. See the test cases with a null array in `CollectionExpressionsSuite`.
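    
    For reference, those cases follow the suite's usual `checkEvaluation` pattern, which exercises both the interpreted and codegen paths. A rough sketch (the exact literals in the suite may differ):
    ```scala
    // A null inner array nullifies the whole result.
    checkEvaluation(
      Flatten(Literal.create(Seq(Seq(1, 2), null), ArrayType(ArrayType(IntegerType)))),
      null)
    // Non-null inner arrays are concatenated in order.
    checkEvaluation(
      Flatten(Literal.create(Seq(Seq(1, 2), Seq(3, 4)), ArrayType(ArrayType(IntegerType)))),
      Seq(1, 2, 3, 4))
    ```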
    
    We can discuss whether the function should behave the same way as in Presto and just ignore null elements... But I think the current approach fits better with the semantics of other Spark functions:
    ```
    concat("a",null,"c") => null
    1 + null => null
    ...
    ```
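    
    Applied to the new function, that gives (a hypothetical spark-shell session; the SQL name `flatten` is assumed from the examples above):
    ```scala
    // Inner arrays are concatenated, as documented in the ExpressionDescription.
    spark.sql("SELECT flatten(array(array(1, 2), array(3, 4)))").collect()
    // => Array([WrappedArray(1, 2, 3, 4)])
    
    // A null inner array nullifies the whole result, consistent with the examples above.
    spark.sql("SELECT flatten(array(array(1, 2), NULL))").collect()
    // => Array([null])
    ```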
     

