Github user gczsjdy commented on a diff in the pull request:

    https://github.com/apache/spark/pull/16476#discussion_r101673768
  
    --- Diff: 
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
 ---
    @@ -340,3 +341,91 @@ object CaseKeyWhen {
         CaseWhen(cases, elseValue)
       }
     }
    +
    +/**
    + * A function that returns the index of str in (str1, str2, ...) list or 0 
if not found.
    + * It takes at least 2 parameters, and all parameters' types should be 
subtypes of AtomicType.
    + */
    +@ExpressionDescription(
    +  usage = "_FUNC_(str, str1, str2, ...) - Returns the index of str in the 
str1,str2,... or 0 if not found.",
    +  extended = """
    +    Examples:
    +      > SELECT _FUNC_(10, 9, 3, 10, 4);
    +       3
    +  """)
    +case class Field(children: Seq[Expression]) extends Expression {
    +
    +  override def nullable: Boolean = false
    +  override def foldable: Boolean = children.forall(_.foldable)
    +
    +  private lazy val ordering = 
TypeUtils.getInterpretedOrdering(children(0).dataType)
    +
    +  override def checkInputDataTypes(): TypeCheckResult = {
    +    if (children.length <= 1) {
    +      TypeCheckResult.TypeCheckFailure(s"FIELD requires at least 2 
arguments")
    +    } else if (!children.forall(_.dataType.isInstanceOf[AtomicType])) {
    +      TypeCheckResult.TypeCheckFailure(s"FIELD requires all arguments to 
be of AtomicType")
    +    } else
    +      TypeCheckResult.TypeCheckSuccess
    +  }
    +
    +  override def dataType: DataType = IntegerType
    +
    +  override def eval(input: InternalRow): Any = {
    +    val target = children.head.eval(input)
    +    val targetDataType = children.head.dataType
    +    def findEqual(target: Any, params: Seq[Expression], index: Int): Int = 
{
    +      params.toList match {
    +        case Nil => 0
    +        case head::tail if targetDataType == head.dataType
    +          && head.eval(input) != null && ordering.equiv(target, 
head.eval(input)) => index
    +        case _ => findEqual(target, params.tail, index + 1)
    +      }
    +    }
    +    if(target == null)
    +      0
    +    else
    +      findEqual(target, children.tail, 1)
    +  }
    +
    +  protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    +    val evalChildren = children.map(_.genCode(ctx))
    +    val target = evalChildren(0)
    +    val targetDataType = children(0).dataType
    +    val rest = evalChildren.drop(1)
    +    val restDataType = children.drop(1).map(_.dataType)
    +
    +    def updateEval(evalWithIndex: ((ExprCode, DataType), Int)): String = {
    +      val ((eval, dataType), index) = evalWithIndex
    +      s"""
    +        ${eval.code}
    +        if (${dataType.equals(targetDataType)}
    +          && ${ctx.genEqual(targetDataType, eval.value, target.value)}) {
    +          ${ev.value} = ${index};
    +        }
    +      """
    +    }
    +
    +    def genIfElseStructure(code1: String, code2: String): String = {
    --- End diff --
    
    For now, I use `reduceRight`, which I think of as a special case of 
`foldRight`.
    If I understand your point about the floating `else` correctly (could you 
please explain it a little more?), neither of these two functions can avoid a 
floating `else`, because we need a nested `else` inside each `else` block, like this:
    `if (xxx)
    else {
      if (xxx)
      else {
      ...
      }
    }
    `, so if we avoided the floating `else` in `genIfElseStructure`, the `else` 
would have to move into `updateEval`, which would make the code unclear and 
complicated.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastruct...@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to