Github user kiszk commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21045#discussion_r191260626
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 ---
    @@ -127,6 +127,165 @@ case class MapKeys(child: Expression)
       override def prettyName: String = "map_keys"
     }
     
    +@ExpressionDescription(
    +  usage = """_FUNC_(a1, a2, ...) - Returns a merged array containing in 
the N-th position the
    +  N-th value of each array given.""",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(1, 2, 3), array(2, 3, 4));
    +        [[1, 2], [2, 3], [3, 4]]
    +      > SELECT _FUNC_(array(1, 2), array(2, 3), array(3, 4));
    +        [[1, 2, 3], [2, 3, 4]]
    +  """,
    +  since = "2.4.0")
    +case class Zip(children: Seq[Expression]) extends Expression with 
ExpectsInputTypes {
    +
    +  override def inputTypes: Seq[AbstractDataType] = 
Seq.fill(children.length)(ArrayType)
    +
    +  override def dataType: DataType = ArrayType(mountSchema)
    +
    +  override def nullable: Boolean = children.forall(_.nullable)
    +
    +  private lazy val arrayTypes = 
children.map(_.dataType.asInstanceOf[ArrayType])
    +
    +  private lazy val arrayElementTypes = arrayTypes.map(_.elementType)
    +
    +  def mountSchema: StructType = {
    +    val fields = children.zip(arrayElementTypes).zipWithIndex.map {
    +      case ((expr: NamedExpression, elementType), _) =>
    +        StructField(expr.name, elementType, nullable = true)
    +      case ((_, elementType), idx) =>
    +        StructField(s"$idx", elementType, nullable = true)
    +    }
    +    StructType(fields)
    +  }
    +
    +  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    val numberOfArrays: Int = children.length
    +    val genericArrayData = classOf[GenericArrayData].getName
    +    val genericInternalRow = classOf[GenericInternalRow].getName
    +    val arrVals = ctx.freshName("arrVals")
    +    val arrCardinality = ctx.freshName("arrCardinality")
    +    val biggestCardinality = ctx.freshName("biggestCardinality")
    +    val storedArrTypes = ctx.freshName("storedArrTypes")
    +    val returnNull = ctx.freshName("returnNull")
    +    val evals = children.map(_.genCode(ctx))
    +
    +    val inputs = evals.zipWithIndex.map { case (eval, index) =>
    +      s"""
    +        |${eval.code}
    +        |if (!${eval.isNull}) {
    +        |  $arrVals[$index] = ${eval.value};
    +        |  $arrCardinality[$index] = ${eval.value}.numElements();
    +        |} else {
    +        |  $arrVals[$index] = null;
    +        |  $arrCardinality[$index] = 0;
    +        |  $returnNull[0] = true;
    +        |}
    +        |$storedArrTypes[$index] = "${arrayElementTypes(index)}";
    --- End diff --
    
    In simple cases, since we already know the data types of all children before 
execution, we may not need to use `$storedArrTypes`. However, I may be missing 
something. 
    
    Would it be possible to show an example test case that requires picking the 
correct `getValue` by using `$storedArrTypes`?


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to