Github user kiszk commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21053#discussion_r181687320
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 ---
    @@ -287,3 +287,106 @@ case class ArrayContains(left: Expression, right: 
Expression)
     
       override def prettyName: String = "array_contains"
     }
    +
    +/**
    + * Returns the value of index `right` in Array `left` or the value for key 
`right` in Map `left`.
    + */
    +@ExpressionDescription(
    +  usage = """
    +    _FUNC_(array, index) - Returns element of array at given index. If 
index < 0, accesses elements
    +      from the last to the first.
    +
    +    _FUNC_(map, key) - Returns value for given key, or NULL if the key is not contained in the map.
    +  """,
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(1, 2, 3), 2);
    +       2
    +      > SELECT _FUNC_(map(1, 'a', 2, 'b'), 2);
    +       b
    +  """,
    +  since = "2.4.0")
    +case class ElementAt(left: Expression, right: Expression) extends 
GetMapValueUtil {
    +
    +  override def dataType: DataType = left.dataType match {
    +    case _: ArrayType => left.dataType.asInstanceOf[ArrayType].elementType
    +    case _: MapType => left.dataType.asInstanceOf[MapType].valueType
    +  }
    +
    +  override def inputTypes: Seq[AbstractDataType] = {
    +    Seq(TypeCollection(ArrayType, MapType),
    +      left.dataType match {
    +        case _: ArrayType => IntegerType
    +        case _: MapType => left.dataType.asInstanceOf[MapType].keyType
    +      }
    +    )
    +  }
    +
    +  override def nullable: Boolean = true
    +
    +  override def nullSafeEval(value: Any, ordinal: Any): Any = {
    +    left.dataType match {
    +      case _: ArrayType =>
    +        val array = value.asInstanceOf[ArrayData]
    +        val index = ordinal.asInstanceOf[Int]
    +        if (array.numElements() < math.abs(index)) {
    +          null
    +        } else {
    +          val idx = if (index == 0) {
    +            throw new ArrayIndexOutOfBoundsException("SQL array indices 
start at 1")
    +          } else if (index > 0) {
    +            index - 1
    +          } else {
    +            array.numElements() + index
    +          }
    +          if (array.isNullAt(idx)) {
    +            null
    +          } else {
    +            array.get(idx, dataType)
    +          }
    +        }
    +      case _: MapType =>
    +        getValueEval(value, ordinal, 
left.dataType.asInstanceOf[MapType].keyType)
    +    }
    +  }
    +
    +  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    left.dataType match {
    +      case _: ArrayType =>
    +        nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
    +          val index = ctx.freshName("elementAtIndex")
    +          val nullCheck = if 
(left.dataType.asInstanceOf[ArrayType].containsNull) {
    +            s"""
    +               |if ($eval1.isNullAt($index)) {
    +               |  ${ev.isNull} = true;
    +               |} else
    +             """
    +          } else {
    +            ""
    +          }
    +          s"""
    +             |int $index = (int) $eval2;
    +             |if ($eval1.numElements() < Math.abs($index)) {
    +             |  ${ev.isNull} = true;
    +             |} else {
    +             |  if ($index == 0) {
    +             |    throw new ArrayIndexOutOfBoundsException("SQL array 
indices start at 1");
    +             |  } else if ($index > 0) {
    +             |    $index--;
    +             |  } else {
    +             |    $index += $eval1.numElements();
    +             |  }
    +             |  $nullCheck
    +             |  {
    +             |    ${ev.value} = ${CodeGenerator.getValue(eval1, dataType, 
index)};
    +             |  }
    +             |}
    +           """
    --- End diff --
    
    good catch, thanks


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to