Github user gczsjdy commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16476#discussion_r101468470
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
 ---
    @@ -340,3 +343,104 @@ object CaseKeyWhen {
         CaseWhen(cases, elseValue)
       }
     }
    +
    +/**
    + * A function that returns the index of expr in (expr1, expr2, ...) list 
or 0 if not found.
    + * It takes at least 2 parameters, and all parameters should be subtype of 
AtomicType or NullType.
    + * It's also acceptable to give parameters of different types. When the 
parameters have different
    + * types, comparing will be done based on type firstly. For example, 
''999'' 's type is StringType,
    + * while 999's type is IntegerType, so that no further comparison need to 
be done since they have
    + * different types.
    + * If the search expression is NULL, the return value is 0 because NULL 
fails equality comparison
    + * with any value.
    + * To also point out, no implicit cast will be done in this expression.
    + */
    +// scalastyle:off line.size.limit
    +@ExpressionDescription(
    +  usage = "_FUNC_(expr, expr1, expr2, ...) - Returns the index of expr in 
the expr1, expr2, ... or 0 if not found.",
    +  extended = """
    +    Examples:
    +      > SELECT _FUNC_(10, 9, 3, 10, 4);
    +       3
    +      > SELECT _FUNC_('a', 'b', 'c', 'd', 'a');
    +       4
    +      > SELECT _FUNC_('999', 'a', 999, 9.99, '999');
    +       4
    +  """)
    +// scalastyle:on line.size.limit
    +case class Field(children: Seq[Expression]) extends Expression {
    +
    +  /** Even if expr is not found in (expr1, expr2, ...) list, the value 
will be 0, not null */
    +  override def nullable: Boolean = false
    +  override def foldable: Boolean = children.forall(_.foldable)
    +
    +  private lazy val ordering = 
TypeUtils.getInterpretedOrdering(children(0).dataType)
    +
    +  private val dataTypeMatchIndex: Array[Int] = 
children.zipWithIndex.tail.filter(
    +    _._1.dataType.sameType(children.head.dataType)).map(_._2).toArray
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    if (children.length <= 1) {
    +      TypeCheckResult.TypeCheckFailure(s"FIELD requires at least 2 
arguments")
    +    } else if (!children.forall(
    +        e => e.dataType.isInstanceOf[AtomicType] || 
e.dataType.isInstanceOf[NullType])) {
    +      TypeCheckResult.TypeCheckFailure(s"FIELD requires all arguments to 
be of AtomicType")
    +    } else {
    +      TypeCheckResult.TypeCheckSuccess
    +    }
    +  }
    +
    +  override def dataType: DataType = IntegerType
    +  override def eval(input: InternalRow): Any = {
    +    val target = children.head.eval(input)
    +    @tailrec def findEqual(index: Int): Int = {
    +      if (index == dataTypeMatchIndex.length) {
    +        0
    +      } else {
    +        val value = children(dataTypeMatchIndex(index)).eval(input)
    +        if (value != null && ordering.equiv(target, value)) {
    +          dataTypeMatchIndex(index)
    +        } else {
    +          findEqual(index + 1)
    +        }
    +      }
    +    }
    +    if (target == null) 0 else findEqual(index = 0)
    +  }
    +
    +  protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    val evalChildren = children.map(_.genCode(ctx))
    --- End diff --
    
There is a filter on line 441 that skips code generation for
type-mismatched children:
`filter(x => dataTypeMatchIndex.contains(x._2))`


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to