sunchao commented on code in PR #109:
URL: https://github.com/apache/arrow-datafusion-comet/pull/109#discussion_r1505418227
##########
spark/src/test/scala/org/apache/comet/exec/CometAggregateSuite.scala:
##########
@@ -537,23 +537,18 @@ class CometAggregateSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       withSQLConf(CometConf.COMET_BATCH_SIZE.key -> batchSize.toString) {
         // Test all combinations of different aggregation & group-by types
-        (1 to 4).foreach { col =>
-          (1 to 14).foreach { gCol =>
-            withView("v") {
-              sql(s"CREATE TEMP VIEW v AS SELECT _g$gCol, _$col FROM tbl ORDER BY _$col")
-              checkSparkAnswer(s"SELECT _g$gCol, FIRST(_$col) FROM v GROUP BY _g$gCol")
-              checkSparkAnswer(s"SELECT _g$gCol, LAST(_$col) FROM v GROUP BY _g$gCol")
-            }
-            checkSparkAnswer(s"SELECT _g$gCol, SUM(_$col) FROM tbl GROUP BY _g$gCol")
-            checkSparkAnswer(
-              s"SELECT _g$gCol, SUM(DISTINCT _$col) FROM tbl GROUP BY _g$gCol")
-            checkSparkAnswer(s"SELECT _g$gCol, COUNT(_$col) FROM tbl GROUP BY _g$gCol")
-            checkSparkAnswer(
-              s"SELECT _g$gCol, COUNT(DISTINCT _$col) FROM tbl GROUP BY _g$gCol")
-            checkSparkAnswer(
-              s"SELECT _g$gCol, MIN(_$col), MAX(_$col) FROM tbl GROUP BY _g$gCol")
-            checkSparkAnswer(s"SELECT _g$gCol, AVG(_$col) FROM tbl GROUP BY _g$gCol")
+        (1 to 14).foreach { gCol =>
+          withView("v") {
+            sql(s"CREATE TEMP VIEW v AS SELECT _g$gCol, _1, _2, _3, _4 " +
+              "FROM tbl ORDER BY _1, _2, _3, _4")
+            checkSparkAnswer(s"SELECT _g$gCol, FIRST(_1), FIRST(_2), FIRST(_3), " +
+              s"FIRST(_4), LAST(_1), LAST(_2), LAST(_3), LAST(_4) FROM v GROUP BY _g$gCol")
          }
+          checkSparkAnswer(s"SELECT _g$gCol, SUM(_1), SUM(_2), COUNT(_3), COUNT(_4), " +
Review Comment:
Yes, the code path is pretty much the same though, so I thought it was fine
to lose some precision here.
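
A minimal standalone sketch of the trade-off being discussed (not part of the PR; the table, column names, and data below are hypothetical): running one combined aggregation query exercises the same hash-aggregation code path as several single-column queries, just with more aggregate expressions per operator, at the cost of pinpointing which column caused a mismatch.

import org.apache.spark.sql.SparkSession

object AggregateConsolidationSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").appName("sketch").getOrCreate()
    import spark.implicits._

    // A small table with one group-by column (_g1) and four value columns (_1.._4).
    Seq((1, 10L, 20L, 30L, 40L), (1, 11L, 21L, 31L, 41L), (2, 12L, 22L, 32L, 42L))
      .toDF("_g1", "_1", "_2", "_3", "_4")
      .createOrReplaceTempView("tbl")

    // Before: one query per value column -- finer-grained failure reporting, more queries to run.
    (1 to 4).foreach { col =>
      spark.sql(s"SELECT _g1, SUM(_$col) FROM tbl GROUP BY _g1").collect()
    }

    // After: a single query over all value columns -- the same aggregation operator
    // is exercised, with some precision lost in locating a failing column.
    spark.sql("SELECT _g1, SUM(_1), SUM(_2), SUM(_3), SUM(_4) FROM tbl GROUP BY _g1").collect()

    spark.stop()
  }
}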