Repository: spark Updated Branches: refs/heads/master b03af8b58 -> 4117786a8
[SPARK-22716][SQL] Avoid the creation of mutable states in addReferenceObj ## What changes were proposed in this pull request? We have two methods to reference an object `addReferenceMinorObj` and `addReferenceObj`. The latter creates a new global variable, which means new entries in the constant pool. The PR unifies the two methods into a single `addReferenceObj` which returns the code to access the object in the `references` array and doesn't add new mutable states. ## How was this patch tested? Added UTs. Author: Marco Gaido <[email protected]> Closes #19916 from mgaido91/SPARK-22716. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4117786a Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4117786a Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4117786a Branch: refs/heads/master Commit: 4117786a87f9d7631dec58a8e7aef09403b20a27 Parents: b03af8b Author: Marco Gaido <[email protected]> Authored: Wed Dec 13 10:29:14 2017 +0800 Committer: Wenchen Fan <[email protected]> Committed: Wed Dec 13 10:29:14 2017 +0800 ---------------------------------------------------------------------- .../spark/sql/catalyst/expressions/Cast.scala | 8 +++---- .../sql/catalyst/expressions/ScalaUDF.scala | 2 +- .../expressions/codegen/CodeGenerator.scala | 22 ++++---------------- .../expressions/datetimeExpressions.scala | 20 +++++++++--------- .../sql/catalyst/expressions/literals.scala | 9 ++++---- .../spark/sql/catalyst/expressions/misc.scala | 2 +- .../catalyst/expressions/objects/objects.scala | 8 +++---- .../expressions/CodeGenerationSuite.scala | 7 +++++++ .../expressions/MiscExpressionsSuite.scala | 1 - .../catalyst/expressions/ScalaUDFSuite.scala | 3 ++- .../aggregate/RowBasedHashMapGenerator.scala | 4 ++-- .../aggregate/VectorizedHashMapGenerator.scala | 4 ++-- 12 files changed, 42 insertions(+), 48 deletions(-) ---------------------------------------------------------------------- 
http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index b8d3661..5279d41 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -605,7 +605,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String (c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString( org.apache.spark.sql.catalyst.util.DateTimeUtils.dateToString($c));""" case TimestampType => - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) (c, evPrim, evNull) => s"""$evPrim = UTF8String.fromString( org.apache.spark.sql.catalyst.util.DateTimeUtils.timestampToString($c, $tz));""" case _ => @@ -633,7 +633,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String } """ case TimestampType => - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) (c, evPrim, evNull) => s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays($c / 1000L, $tz);" case _ => @@ -713,7 +713,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String from: DataType, ctx: CodegenContext): CastFunction = from match { case StringType => - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val longOpt = ctx.freshName("longOpt") (c, evPrim, evNull) => s""" @@ -730,7 +730,7 @@ case class Cast(child: Expression, dataType: DataType, timeZoneId: Option[String case _: IntegralType => (c, evPrim, evNull) => s"$evPrim = ${longToTimeStampCode(c)};" case 
DateType => - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) (c, evPrim, evNull) => s"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.daysToMillis($c, $tz) * 1000;" case DecimalType() => http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala index 4d26d98..a3cf761 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala @@ -1000,7 +1000,7 @@ case class ScalaUDF( ctx: CodegenContext, ev: ExprCode): ExprCode = { val scalaUDF = ctx.freshName("scalaUDF") - val scalaUDFRef = ctx.addReferenceMinorObj(this, scalaUDFClassName) + val scalaUDFRef = ctx.addReferenceObj("scalaUDFRef", this, scalaUDFClassName) // Object to convert the returned value of user-defined functions to Catalyst type val catalystConverterTerm = ctx.freshName("catalystConverter") http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala index 5c9e604..4b8b16f 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala @@ -109,28 +109,14 @@ class 
CodegenContext { * * Returns the code to access it. * - * This is for minor objects not to store the object into field but refer it from the references - * field at the time of use because number of fields in class is limited so we should reduce it. + * This does not to store the object into field but refer it from the references field at the + * time of use because number of fields in class is limited so we should reduce it. */ - def addReferenceMinorObj(obj: Any, className: String = null): String = { + def addReferenceObj(objName: String, obj: Any, className: String = null): String = { val idx = references.length references += obj val clsName = Option(className).getOrElse(obj.getClass.getName) - s"(($clsName) references[$idx])" - } - - /** - * Add an object to `references`, create a class member to access it. - * - * Returns the name of class member. - */ - def addReferenceObj(name: String, obj: Any, className: String = null): String = { - val term = freshName(name) - val idx = references.length - references += obj - val clsName = Option(className).getOrElse(obj.getClass.getName) - addMutableState(clsName, term, s"$term = ($clsName) references[$idx];") - term + s"(($clsName) references[$idx] /* $objName */)" } /** http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala index eaf8788..44d54a2 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala @@ -226,7 +226,7 @@ case class Hour(child: Expression, timeZoneId: Option[String] = None) } 
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, c => s"$dtu.getHours($c, $tz)") } @@ -257,7 +257,7 @@ case class Minute(child: Expression, timeZoneId: Option[String] = None) } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, c => s"$dtu.getMinutes($c, $tz)") } @@ -288,7 +288,7 @@ case class Second(child: Expression, timeZoneId: Option[String] = None) } override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, c => s"$dtu.getSeconds($c, $tz)") } @@ -529,7 +529,7 @@ case class DateFormatClass(left: Expression, right: Expression, timeZoneId: Opti override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) defineCodeGen(ctx, ev, (timestamp, format) => { s"""UTF8String.fromString($dtu.newDateFormat($format.toString(), $tz) .format(new java.util.Date($timestamp / 1000)))""" @@ -691,7 +691,7 @@ abstract class UnixTime }""") } case StringType => - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") nullSafeCodeGen(ctx, ev, (string, format) => { s""" @@ -715,7 +715,7 @@ abstract class UnixTime ${ev.value} = ${eval1.value} / 1000000L; }""") case DateType => - val tz = ctx.addReferenceMinorObj(timeZone) + val 
tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") val eval1 = left.genCode(ctx) ev.copy(code = s""" @@ -827,7 +827,7 @@ case class FromUnixTime(sec: Expression, format: Expression, timeZoneId: Option[ }""") } } else { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") nullSafeCodeGen(ctx, ev, (seconds, f) => { s""" @@ -969,7 +969,7 @@ case class TimeAdd(start: Expression, interval: Expression, timeZoneId: Option[S } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (sd, i) => { s"""$dtu.timestampAddInterval($sd, $i.months, $i.microseconds, $tz)""" @@ -1065,7 +1065,7 @@ case class TimeSub(start: Expression, interval: Expression, timeZoneId: Option[S } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (sd, i) => { s"""$dtu.timestampAddInterval($sd, 0 - $i.months, 0 - $i.microseconds, $tz)""" @@ -1143,7 +1143,7 @@ case class MonthsBetween(date1: Expression, date2: Expression, timeZoneId: Optio } override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { - val tz = ctx.addReferenceMinorObj(timeZone) + val tz = ctx.addReferenceObj("timeZone", timeZone) val dtu = DateTimeUtils.getClass.getName.stripSuffix("$") defineCodeGen(ctx, ev, (l, r) => { s"""$dtu.monthsBetween($l, $r, $tz)""" http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala ---------------------------------------------------------------------- diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala index eaeaf08..383203a 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala @@ -290,7 +290,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression { case FloatType => val v = value.asInstanceOf[Float] if (v.isNaN || v.isInfinite) { - val boxedValue = ctx.addReferenceMinorObj(v) + val boxedValue = ctx.addReferenceObj("boxedValue", v) val code = s"final $javaType ${ev.value} = ($javaType) $boxedValue;" ev.copy(code = code) } else { @@ -299,7 +299,7 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression { case DoubleType => val v = value.asInstanceOf[Double] if (v.isNaN || v.isInfinite) { - val boxedValue = ctx.addReferenceMinorObj(v) + val boxedValue = ctx.addReferenceObj("boxedValue", v) val code = s"final $javaType ${ev.value} = ($javaType) $boxedValue;" ev.copy(code = code) } else { @@ -309,8 +309,9 @@ case class Literal (value: Any, dataType: DataType) extends LeafExpression { ev.copy(code = "", value = s"($javaType)$value") case TimestampType | LongType => ev.copy(code = "", value = s"${value}L") - case other => - ev.copy(code = "", value = ctx.addReferenceMinorObj(value, ctx.javaType(dataType))) + case _ => + ev.copy(code = "", value = ctx.addReferenceObj("literal", value, + ctx.javaType(dataType))) } } } http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala index b86e271..4b9006a 
100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/misc.scala @@ -81,7 +81,7 @@ case class AssertTrue(child: Expression) extends UnaryExpression with ImplicitCa // Use unnamed reference that doesn't create a local field here to reduce the number of fields // because errMsgField is used only when the value is null or false. - val errMsgField = ctx.addReferenceMinorObj(errMsg) + val errMsgField = ctx.addReferenceObj("errMsg", errMsg) ExprCode(code = s"""${eval.code} |if (${eval.isNull} || !${eval.value}) { | throw new RuntimeException($errMsgField); http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala index 349afec..4bd395e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala @@ -1123,7 +1123,7 @@ case class CreateExternalRow(children: Seq[Expression], schema: StructType) expressions = childrenCodes, funcName = "createExternalRow", extraArguments = "Object[]" -> values :: Nil) - val schemaField = ctx.addReferenceMinorObj(schema) + val schemaField = ctx.addReferenceObj("schema", schema) val code = s""" @@ -1310,7 +1310,7 @@ case class AssertNotNull(child: Expression, walkedTypePath: Seq[String] = Nil) // Use unnamed reference that doesn't create a local field here to reduce the number of fields // because errMsgField is used only when the value is null. 
- val errMsgField = ctx.addReferenceMinorObj(errMsg) + val errMsgField = ctx.addReferenceObj("errMsg", errMsg) val code = s""" ${childGen.code} @@ -1347,7 +1347,7 @@ case class GetExternalRowField( override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { // Use unnamed reference that doesn't create a local field here to reduce the number of fields // because errMsgField is used only when the field is null. - val errMsgField = ctx.addReferenceMinorObj(errMsg) + val errMsgField = ctx.addReferenceObj("errMsg", errMsg) val row = child.genCode(ctx) val code = s""" ${row.code} @@ -1387,7 +1387,7 @@ case class ValidateExternalType(child: Expression, expected: DataType) override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { // Use unnamed reference that doesn't create a local field here to reduce the number of fields // because errMsgField is used only when the type doesn't match. - val errMsgField = ctx.addReferenceMinorObj(errMsg) + val errMsgField = ctx.addReferenceObj("errMsg", errMsg) val input = child.genCode(ctx) val obj = input.value http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala index 40bf29b..a969811 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala @@ -394,4 +394,11 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper { Map("add" -> Literal(1))).genCode(ctx) assert(ctx.mutableStates.isEmpty) } + + test("SPARK-22716: addReferenceObj should not add mutable states") { + val 
ctx = new CodegenContext + val foo = new Object() + ctx.addReferenceObj("foo", foo) + assert(ctx.mutableStates.isEmpty) + } } http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala index 4fe7b43..facc863 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MiscExpressionsSuite.scala @@ -43,5 +43,4 @@ class MiscExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { checkEvaluation(Length(Uuid()), 36) assert(evaluate(Uuid()) !== evaluate(Uuid())) } - } http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala ---------------------------------------------------------------------- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala index 70dea4b..b0687fe 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDFSuite.scala @@ -51,6 +51,7 @@ class ScalaUDFSuite extends SparkFunSuite with ExpressionEvalHelper { test("SPARK-22695: ScalaUDF should not use global variables") { val ctx = new CodegenContext ScalaUDF((s: String) => s + "x", StringType, Literal("a") :: Nil).genCode(ctx) - assert(ctx.mutableStates.isEmpty) + // ScalaUDF can be very verbose and trigger reduceCodeSize + 
assert(ctx.mutableStates.forall(_._2.startsWith("globalIsNull"))) } } http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/RowBasedHashMapGenerator.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/RowBasedHashMapGenerator.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/RowBasedHashMapGenerator.scala index 3718424..fd25707 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/RowBasedHashMapGenerator.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/RowBasedHashMapGenerator.scala @@ -47,7 +47,7 @@ class RowBasedHashMapGenerator( val generatedKeySchema: String = s"new org.apache.spark.sql.types.StructType()" + groupingKeySchema.map { key => - val keyName = ctx.addReferenceMinorObj(key.name) + val keyName = ctx.addReferenceObj("keyName", key.name) key.dataType match { case d: DecimalType => s""".add($keyName, org.apache.spark.sql.types.DataTypes.createDecimalType( @@ -60,7 +60,7 @@ class RowBasedHashMapGenerator( val generatedValueSchema: String = s"new org.apache.spark.sql.types.StructType()" + bufferSchema.map { key => - val keyName = ctx.addReferenceMinorObj(key.name) + val keyName = ctx.addReferenceObj("keyName", key.name) key.dataType match { case d: DecimalType => s""".add($keyName, org.apache.spark.sql.types.DataTypes.createDecimalType( http://git-wip-us.apache.org/repos/asf/spark/blob/4117786a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/VectorizedHashMapGenerator.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/VectorizedHashMapGenerator.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/VectorizedHashMapGenerator.scala index f04cd48..0380ee8 100644 --- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/VectorizedHashMapGenerator.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/VectorizedHashMapGenerator.scala @@ -54,7 +54,7 @@ class VectorizedHashMapGenerator( val generatedSchema: String = s"new org.apache.spark.sql.types.StructType()" + (groupingKeySchema ++ bufferSchema).map { key => - val keyName = ctx.addReferenceMinorObj(key.name) + val keyName = ctx.addReferenceObj("keyName", key.name) key.dataType match { case d: DecimalType => s""".add($keyName, org.apache.spark.sql.types.DataTypes.createDecimalType( @@ -67,7 +67,7 @@ class VectorizedHashMapGenerator( val generatedAggBufferSchema: String = s"new org.apache.spark.sql.types.StructType()" + bufferSchema.map { key => - val keyName = ctx.addReferenceMinorObj(key.name) + val keyName = ctx.addReferenceObj("keyName", key.name) key.dataType match { case d: DecimalType => s""".add($keyName, org.apache.spark.sql.types.DataTypes.createDecimalType( --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
