Github user maropu commented on a diff in the pull request:
https://github.com/apache/spark/pull/21860#discussion_r208788502
--- Diff:
sql/core/src/test/scala/org/apache/spark/sql/execution/WholeStageCodegenSuite.scala
---
@@ -232,6 +232,25 @@ class WholeStageCodegenSuite extends QueryTest with
SharedSQLContext {
}
}
+ test("SPARK-24901 check merge FastHashMap and RegularHashMap generate
code max size") {
+ var twoLevelMaxCodeSize: Int = 0
+ val caseNumber = 80
+ // merge fastHashMap and regularHashMap generate code max size
+ val codeWithLongFunctions = genGroupByCode(caseNumber)
+ val (_, maxCodeSize) = CodeGenerator.compile(codeWithLongFunctions)
+
+ // master fastHashMap and regularHashMap generate code max size
+ withSQLConf("spark.sql.codegen.aggregate.map.twolevel.enabled" ->
"true",
+ "spark.sql.codegen.aggregate.map.vectorized.enable" -> "true") {
+ val codeWithLongFunction1 = genGroupByCode(caseNumber)
+ val (_, maxCodeSize1) = CodeGenerator.compile(codeWithLongFunction1)
+ // maxCodeSize1: 27062
+ twoLevelMaxCodeSize = maxCodeSize1
+ }
+
+ assert(2 * maxCodeSize < twoLevelMaxCodeSize)
+ }
--- End diff --
Do we need this test? I think it's fine to just rely on the existing tests passing.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]