Github user kiszk commented on a diff in the pull request: https://github.com/apache/spark/pull/19813#discussion_r153848791 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala --- @@ -115,9 +118,240 @@ abstract class Expression extends TreeNode[Expression] { } } + /** + * Records current input row and variables for this expression into created `ExprCode`. + */ + private def populateInputs(ctx: CodegenContext, eval: ExprCode): Unit = { + if (ctx.INPUT_ROW != null) { + eval.inputRow = ctx.INPUT_ROW + } + if (ctx.currentVars != null) { + val boundRefs = this.collect { + case b @ BoundReference(ordinal, _, _) if ctx.currentVars(ordinal) != null => (ordinal, b) + }.toMap + + ctx.currentVars.zipWithIndex.filter(_._1 != null).foreach { case (currentVar, idx) => + if (boundRefs.contains(idx)) { + val inputVar = ExprInputVar(boundRefs(idx), exprCode = currentVar) + eval.inputVars += inputVar + } + } + } + } + + /** + * Returns the eliminated subexpressions in the children expressions. + */ + private def getSubExprInChildren(ctx: CodegenContext): Seq[Expression] = { + children.flatMap { child => + child.collect { + case e if ctx.subExprEliminationExprs.contains(e) => e + } + } + } + + /** + * Given the list of eliminated subexpressions used in the children expressions, returns the + * strings of function parameters. The first is the variable names used to call the function, + * the second is the parameters used to declare the function in generated code. + */ + private def getParamsForSubExprs( + ctx: CodegenContext, + subExprs: Seq[Expression]): (Seq[String], Seq[String]) = { + subExprs.flatMap { subExpr => + val arguType = ctx.javaType(subExpr.dataType) --- End diff -- nit: `argType`?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org