GitHub user ueshin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21282#discussion_r193958595
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 ---
    @@ -308,6 +309,234 @@ case class MapEntries(child: Expression) extends 
UnaryExpression with ExpectsInp
       override def prettyName: String = "map_entries"
     }
     
    +/**
    + * Returns a map created from the given array of entries.
    + */
    +@ExpressionDescription(
    +  usage = "_FUNC_(arrayOfEntries) - Returns a map created from the given 
array of entries.",
    +  examples = """
    +    Examples:
    +      > SELECT _FUNC_(array(struct(1, 'a'), struct(2, 'b')));
    +       {1:"a",2:"b"}
    +  """,
    +  since = "2.4.0")
    +case class MapFromEntries(child: Expression) extends UnaryExpression {
    +
    +  @transient
    +  private lazy val dataTypeDetails: Option[(MapType, Boolean, Boolean)] = 
child.dataType match {
    +    case ArrayType(
    +      StructType(Array(
    +        StructField(_, keyType, keyNullable, _),
    +        StructField(_, valueType, valueNullable, _))),
    +      containsNull) => Some((MapType(keyType, valueType, valueNullable), 
keyNullable, containsNull))
    +    case _ => None
    +  }
    +
    +  private def nullEntries: Boolean = dataTypeDetails.get._3
    +
    +  override def dataType: MapType = dataTypeDetails.get._1
    +
    +  override def checkInputDataTypes(): TypeCheckResult = dataTypeDetails 
match {
    +    case Some(_) => TypeCheckResult.TypeCheckSuccess
    +    case None => TypeCheckResult.TypeCheckFailure(s"'${child.sql}' is of " 
+
    +      s"${child.dataType.simpleString} type. $prettyName accepts only 
arrays of pair structs.")
    +  }
    +
    +  override protected def nullSafeEval(input: Any): Any = {
    +    val arrayData = input.asInstanceOf[ArrayData]
    +    val length = arrayData.numElements()
    +    val numEntries = if (nullEntries) (0 until 
length).count(!arrayData.isNullAt(_)) else length
    +    val keyArray = new Array[AnyRef](numEntries)
    +    val valueArray = new Array[AnyRef](numEntries)
    +    var i = 0
    +    var j = 0
    +    while (i < length) {
    +      if (!arrayData.isNullAt(i)) {
    --- End diff --
    
    Yeah, that sounds reasonable. Thanks.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to