HyukjinKwon commented on a change in pull request #23787: [SPARK-26830][SQL][R]
Vectorized R dapply() implementation
URL: https://github.com/apache/spark/pull/23787#discussion_r257096271
##########
File path: R/pkg/tests/fulltests/test_sparkSQL.R
##########
@@ -3300,6 +3300,101 @@ test_that("dapplyCollect() on DataFrame with a binary
column", {
})
+test_that("dapply() Arrow optimization", {
+ skip_if_not_installed("arrow")
+ df <- createDataFrame(mtcars)
+
+ conf <- callJMethod(sparkSession, "conf")
+ arrowEnabled <- sparkR.conf("spark.sql.execution.arrow.enabled")[[1]]
+
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", "false")
+ tryCatch({
+ ret <- dapply(df,
+ function(rdf) {
+ stopifnot(class(rdf) == "data.frame")
+ rdf
+ },
+ schema(df))
+ expected <- collect(ret)
+ },
+ finally = {
+ # Resetting the conf back to default value
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", arrowEnabled)
+ })
+
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", "true")
+ tryCatch({
+ ret <- dapply(df,
+ function(rdf) {
+ stopifnot(class(rdf) == "data.frame")
+ rdf
+ },
+ schema(df))
+ actual <- collect(ret)
+ expect_equal(actual, expected)
+ },
+ finally = {
+ # Resetting the conf back to default value
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", arrowEnabled)
+ })
+})
+
+test_that("dapply() Arrow optimization - type specification", {
+ skip_if_not_installed("arrow")
+ # Note that regular dapply() does not seem to support dates and timestamps,
+ # whereas Arrow-optimized dapply() does.
+ rdf <- data.frame(list(list(a = 1,
+ b = "a",
+ c = TRUE,
+ d = 1.1,
+ e = 1L)))
+ df <- createDataFrame(rdf)
+
+ conf <- callJMethod(sparkSession, "conf")
+ arrowEnabled <- sparkR.conf("spark.sql.execution.arrow.enabled")[[1]]
+
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", "false")
+ tryCatch({
+ ret <- dapply(df, function(rdf) { rdf }, schema(df))
+ expected <- collect(ret)
+ },
+ finally = {
+ # Resetting the conf back to default value
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", arrowEnabled)
+ })
+
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", "true")
+ tryCatch({
+ ret <- dapply(df, function(rdf) { rdf }, schema(df))
+ actual <- collect(ret)
+ expect_equal(actual, expected)
+ },
+ finally = {
+ # Resetting the conf back to default value
+ callJMethod(conf, "set", "spark.sql.execution.arrow.enabled", arrowEnabled)
+ })
+})
+
+test_that("dapply() Arrow optimization - type specification (date and
timestamp)", {
+ skip_if_not_installed("arrow")
+ rdf <- data.frame(list(list(a = as.Date("1990-02-24"),
+ b = as.POSIXct("1990-02-24 12:34:56"))))
Review comment:
I plan to add date and timestamp support for dapply() as well, so that we can
deduplicate these tests.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]