viirya commented on a change in pull request #26509: [SPARK-29427][SQL] Add API
to convert RelationalGroupedDataset to KeyValueGroupedDataset
URL: https://github.com/apache/spark/pull/26509#discussion_r347924995
##########
File path: sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
##########
@@ -2221,4 +2222,51 @@ class DataFrameSuite extends QueryTest with
SharedSparkSession {
val idTuples = sampled.collect().map(row => row.getLong(0) ->
row.getLong(1))
assert(idTuples.length == idTuples.toSet.size)
}
+
+ test("groupBy.keyAs") {
+ // Both inputs are pre-shuffled and sorted on the grouping columns so the
+ // cogroup below can reuse that distribution instead of adding a shuffle.
+ val df1 = Seq((1, 2, 3), (2, 3, 4)).toDF("a", "b", "c")
+ .repartition($"a", $"b").sortWithinPartitions("a", "b")
+ val df2 = Seq((1, 2, 4), (2, 3, 5)).toDF("a", "b", "c")
+ .repartition($"a", $"b").sortWithinPartitions("a", "b")
+
+ // Encoder for the untyped Row values flowing through the typed cogroup.
+ implicit val valueEncoder = RowEncoder(df1.schema)
+
+ // Convert each RelationalGroupedDataset to a KeyValueGroupedDataset keyed
+ // by GroupByKey (presumably a case class matching columns (a, b), defined
+ // elsewhere in this suite -- not visible here) and sum the per-key "c"
+ // values from the two sides.
+ val df3 = df1.groupBy("a", "b").as[GroupByKey, Row]
+ .cogroup(df2.groupBy("a", "b").as[GroupByKey, Row]) { case (_, data1,
data2) =>
+ data1.zip(data2).map { p =>
+ p._1.getInt(2) + p._2.getInt(2)
+ }
+ }.toDF
+
+ // Key (1, 2): 3 + 4 = 7; key (2, 3): 4 + 5 = 9.
+ checkAnswer(df3.sort("value"), Row(7) :: Row(9) :: Nil)
+
+ // Assert that no extra shuffle introduced by cogroup.
+ // Exactly the two explicit repartition() calls above should remain.
+ val exchanges = df3.queryExecution.executedPlan.collect {
+ case h: ShuffleExchangeExec => h
+ }
+ assert(exchanges.size == 2)
+ }
+
+ test("groupBy.keyAs: custom grouping expressions") {
+ val df1 = Seq((1, 2, 3), (2, 3, 4)).toDF("a1", "b", "c")
+ .repartition($"a1", $"b").sortWithinPartitions("a1", "b")
+ val df2 = Seq((1, 2, 4), (2, 3, 5)).toDF("a1", "b", "c")
+ .repartition($"a1", $"b").sortWithinPartitions("a1", "b")
Review comment:
No — because we need to alias `a + 1` to `a` in order to make deserialization
of `GroupByKey` work, there is still a shuffle introduced by the cogroup.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]