dtenedor commented on a change in pull request #35735:
URL: https://github.com/apache/spark/pull/35735#discussion_r821923561



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
##########
@@ -4328,6 +4328,37 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
         Row(3, 2, 6) :: Nil)
     }
   }
+
+  test("SC-93338: Cover the histogram_numeric aggregate function for many input types") {
+    for (query <- Seq(
+      "SELECT histogram_numeric(col, 3) FROM VALUES (1), (2), (3) AS tab(col)",
+      "SELECT histogram_numeric(col, 3) FROM VALUES (1L), (2L), (3L) AS tab(col)",
+      "SELECT histogram_numeric(col, 3) FROM VALUES (1F), (2F), (3F) AS tab(col)",
+      "SELECT histogram_numeric(col, 3) FROM VALUES (1D), (2D), (3D) AS tab(col)",
+      "SELECT histogram_numeric(col, 3) FROM VALUES (1S), (2S), (3S) AS tab(col)",
+      """SELECT histogram_numeric(col, 3) FROM VALUES
+        (CAST(1 AS BYTE)), (CAST(2 AS BYTE)), (CAST(3 AS BYTE)) AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES
+        (CAST(1 AS TINYINT)), (CAST(2 AS TINYINT)), (CAST(3 AS TINYINT)) AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES
+        (CAST(1 AS SMALLINT)), (CAST(2 AS SMALLINT)), (CAST(3 AS SMALLINT)) AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES
+        (CAST(1 AS BIGINT)), (CAST(2 AS BIGINT)), (CAST(3 AS BIGINT)) AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES (TIMESTAMP '2017-03-01 00:00:00'),
+        (TIMESTAMP '2017-04-01 00:00:00'), (TIMESTAMP '2017-05-01 00:00:00') AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES (INTERVAL '100-00' YEAR TO MONTH),
+        (INTERVAL '110-00' YEAR TO MONTH), (INTERVAL '120-00' YEAR TO MONTH) AS tab(col)""",
+      """SELECT histogram_numeric(col, 3) FROM VALUES (INTERVAL '12 20:4:0' DAY TO SECOND),
+        (INTERVAL '12 21:4:0' DAY TO SECOND), (INTERVAL '12 22:4:0' DAY TO SECOND) AS tab(col)"""
+    )) {
+      val df = sql(query)
+      val result = df.collect.head
+      // As a basic sanity check, ensure that the result is not NULL. This makes sure that we run
+      // all stages of the aggregate function and evaluate the result to some concrete value each
+      // time.
+      assert(!result.isNullAt(0))

Review comment:
       Good point, done.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to