Github user gatorsmile commented on a diff in the pull request:
https://github.com/apache/spark/pull/19083#discussion_r142582571
--- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/WholeStageCodegenExec.scala ---
@@ -380,23 +380,26 @@ case class WholeStageCodegenExec(child: SparkPlan) extends UnaryExecNode with Co
   override def doExecute(): RDD[InternalRow] = {
     val (ctx, cleanedSource) = doCodeGen()
-    if (ctx.isTooLongGeneratedFunction) {
-      logWarning("Found too long generated codes and JIT optimization might not work, " +
-        "Whole-stage codegen disabled for this plan, " +
-        "You can change the config spark.sql.codegen.MaxFunctionLength " +
-        "to adjust the function length limit:\n "
-        + s"$treeString")
-      return child.execute()
-    }
     // try to compile and fallback if it failed
-    try {
+    val (_, maxCodeSize) = try {
       CodeGenerator.compile(cleanedSource)
     } catch {
       case _: Exception if !Utils.isTesting && sqlContext.conf.codegenFallback =>
         // We should already saw the error message
         logWarning(s"Whole-stage codegen disabled for this plan:\n $treeString")
         return child.execute()
     }
+
+    // Check if compiled code has a too large function
+    if (maxCodeSize > sqlContext.conf.hugeMethodLimit) {
+      logWarning(s"Found too long generated codes and JIT optimization might not work: " +
+        s"the bytecode size was $maxCodeSize, this value went over the limit " +
+        s"${sqlContext.conf.hugeMethodLimit}, and the whole-stage codegen was disable " +
+        s"for this plan. To avoid this, you can set the limit " +
--- End diff --
`set ` -> `raise `. Then, remove `higher`.
---
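
For readers following the thread, a minimal sketch of how the limit discussed above could be raised from user code. This assumes the new option is exposed as the SQL conf key spark.sql.codegen.hugeMethodLimit (inferred from `sqlContext.conf.hugeMethodLimit` in the diff; the exact key and default may differ in the merged change):

    import org.apache.spark.sql.SparkSession

    object HugeMethodLimitSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("huge-method-limit-sketch")
          .master("local[*]")
          .getOrCreate()

        // Raise the bytecode-size threshold above which whole-stage codegen
        // is disabled for a plan (assumed conf key; value is in bytes of
        // generated bytecode per method).
        spark.conf.set("spark.sql.codegen.hugeMethodLimit", 65535)

        // Queries run afterwards pick up the new limit.
        spark.range(1000).selectExpr("sum(id)").show()

        spark.stop()
      }
    }

This only changes when the fallback to the non-codegen path is triggered; the warning in the diff is still logged whenever the compiled method size exceeds the configured limit.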