jaceklaskowski commented on code in PR #34558:
URL: https://github.com/apache/spark/pull/34558#discussion_r1156370630
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala:
##########
@@ -172,6 +172,40 @@ class CodegenContext extends Logging {
*/
var currentVars: Seq[ExprCode] = null
+ /**
+ * Holding a map of current lambda variables.
+ */
+ var currentLambdaVars: mutable.Map[String, ExprCode] = mutable.HashMap.empty
+
+ def withLambdaVars(namedLambdas: Seq[NamedLambdaVariable],
+ f: Seq[ExprCode] => ExprCode): ExprCode = {
+ val lambdaVars = namedLambdas.map { namedLambda =>
+ val name = namedLambda.variableName
+ if (currentLambdaVars.get(name).nonEmpty) {
+ throw QueryExecutionErrors.lambdaVariableAlreadyDefinedError(name)
+ }
+ val isNull = if (namedLambda.nullable) {
+ JavaCode.isNullGlobal(addMutableState(JAVA_BOOLEAN, "lambdaIsNull"))
+ } else {
+ FalseLiteral
+ }
+ val value = addMutableState(javaType(namedLambda.dataType),
"lambdaValue")
+ val lambdaVar = ExprCode(isNull, JavaCode.global(value,
namedLambda.dataType))
+ currentLambdaVars.put(name, lambdaVar)
+ lambdaVar
+ }
+
+ val result = f(lambdaVars)
+ namedLambdas.foreach(v => currentLambdaVars.remove(v.variableName))
Review Comment:
nit: s/v/lambda? Don't want to ask for `namedLambda` as used earlier, but `v`
does not really fit.
I'd also consider
`namedLambdas.map(_.variableName).foreach(currentLambdaVars.remove)`
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala:
##########
@@ -101,6 +101,14 @@ case class NamedLambdaVariable(
override def simpleString(maxFields: Int): String = {
s"lambda $name#${exprId.id}: ${dataType.simpleString(maxFields)}"
}
+
+ // We need to include the Expr ID in the Codegen variable name since several
tests bypass
+ // `UnresolvedNamedLambdaVariable.freshVarName`
+ lazy val variableName = s"${name}_${exprId.id}"
Review Comment:
It does not seem consistent with `simpleString`. Is this intentional?
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala:
##########
@@ -172,6 +172,40 @@ class CodegenContext extends Logging {
*/
var currentVars: Seq[ExprCode] = null
+ /**
+ * Holding a map of current lambda variables.
+ */
+ var currentLambdaVars: mutable.Map[String, ExprCode] = mutable.HashMap.empty
+
+ def withLambdaVars(namedLambdas: Seq[NamedLambdaVariable],
+ f: Seq[ExprCode] => ExprCode): ExprCode = {
+ val lambdaVars = namedLambdas.map { namedLambda =>
+ val name = namedLambda.variableName
+ if (currentLambdaVars.get(name).nonEmpty) {
+ throw QueryExecutionErrors.lambdaVariableAlreadyDefinedError(name)
+ }
+ val isNull = if (namedLambda.nullable) {
+ JavaCode.isNullGlobal(addMutableState(JAVA_BOOLEAN, "lambdaIsNull"))
+ } else {
+ FalseLiteral
+ }
+ val value = addMutableState(javaType(namedLambda.dataType),
"lambdaValue")
+ val lambdaVar = ExprCode(isNull, JavaCode.global(value,
namedLambda.dataType))
+ currentLambdaVars.put(name, lambdaVar)
+ lambdaVar
+ }
+
+ val result = f(lambdaVars)
+ namedLambdas.foreach(v => currentLambdaVars.remove(v.variableName))
+ result
+ }
+
+ def getLambdaVar(name: String): ExprCode = {
+ currentLambdaVars.getOrElse(name, {
Review Comment:
nit: Are these curly brackets required?
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala:
##########
@@ -350,6 +445,49 @@ case class ArrayTransform(
result
}
+ override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+ ctx.withLambdaVars(Seq(elementVar) ++ indexVar, { lambdaExprs =>
Review Comment:
nit: Be consistent with `{`s; here, `{` is before an input argument to a
function literal while a few lines below it's after `=>`.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]