dongjoon-hyun commented on a change in pull request #27803: [SPARK-31049][SQL] Support nested adjacent generators, e.g., explode(explode(v)) URL: https://github.com/apache/spark/pull/27803#discussion_r399906896
########## File path: sql/core/src/test/scala/org/apache/spark/sql/GeneratorFunctionSuite.scala ##########
@@ -344,12 +345,55 @@ class GeneratorFunctionSuite extends QueryTest with SharedSparkSession {
     }
   }

+  test("Supported nested inner generators") {
+    // Project cases
+    checkAnswer(
+      sql("SELECT explode(explode(array(array(1, 2), array(3))))"),
+      Row(1) :: Row(2) :: Row(3) :: Nil)
+    checkAnswer(
+      sql("SELECT array(array(4), array(5)) v").select(explode_outer(explode_outer($"v"))),
+      Row(4) :: Row(5) :: Nil)
+    checkAnswer(
+      sql("SELECT posexplode(explode(array(array(1, 2), array(3))))"),
+      Row(0, 1) :: Row(1, 2) :: Row(0, 3) :: Nil)
+    checkAnswer(
+      sql("SELECT array(array(4), array(5)) v").select(posexplode_outer(explode($"v"))),
+      Row(0, 4) :: Row(0, 5) :: Nil)
+    checkAnswer(
+      sql("SELECT inline(explode(array(array(struct(1, 'a'), struct(2, 'b')))))"),
+      Row(1, "a") :: Row(2, "b") :: Nil)
+
+    // Aggregate cases
+    checkAnswer(
+      sql("SELECT explode_outer(explode(array(array(min(v), max(v))))) FROM VALUES 1, 2, 3 t(v)"),
+      Row(1) :: Row(3) :: Nil)
+    checkAnswer(
+      sql("SELECT array(array(min(v), max(v))) ar FROM VALUES 7, 9 t(v)")
+        .select(explode(explode_outer($"ar"))),
+      Row(7) :: Row(9) :: Nil)

Review comment:
   Can we add a deeper test case like the following?
   ```scala
   scala> sql("select explode(explode(explode(explode(array(array(array(array(1, 2), array(3, 4))))))))").show()
   +---+
   |col|
   +---+
   |  1|
   |  2|
   |  3|
   |  4|
   +---+
   ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org