Github user felixcheung commented on a diff in the pull request:
https://github.com/apache/spark/pull/13760#discussion_r67778002
--- Diff: R/pkg/R/group.R ---
@@ -191,18 +184,72 @@ createMethods()
setMethod("gapply",
signature(x = "GroupedData"),
function(x, func, schema) {
- try(if (is.null(schema)) stop("schema cannot be NULL"))
- packageNamesArr <- serialize(.sparkREnv[[".packages"]],
- connection = NULL)
- broadcastArr <- lapply(ls(.broadcastNames),
- function(name) { get(name, .broadcastNames) })
- sdf <- callJStatic(
- "org.apache.spark.sql.api.r.SQLUtils",
- "gapply",
- x@sgd,
- serialize(cleanClosure(func), connection = NULL),
- packageNamesArr,
- broadcastArr,
- schema$jobj)
- dataFrame(sdf)
+ gapplyInternal(x, func, schema)
+ })
+
+#' gapplyCollect
+#'
+#' Applies an R function to each group in the input GroupedData and collects the result
+#' back to R as a data.frame.
+#'
+#' @param x a GroupedData
+#' @param func A function to be applied to each group partition specified by GroupedData.
+#' The function `func` takes as argument a key - grouping columns and
+#' a data frame - a local R data.frame.
+#' The output of `func` is a local R data.frame.
+#' @return a data.frame
+#' @rdname gapplyCollect
+#' @name gapplyCollect
+#' @seealso gapply \link{gapply}
--- End diff --
please add `@export`
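
For context, here is a minimal usage sketch of the gapplyCollect API documented in this hunk. It assumes a running SparkR session (Spark 2.0+) and the built-in `faithful` dataset; the mean aggregation and output column names are illustrative only and are not part of this patch:

    # illustrative sketch, not part of the patch
    library(SparkR)
    sparkR.session()

    df <- createDataFrame(faithful)

    # `func` receives the grouping key and a local R data.frame for that group
    # and must return a local R data.frame. gapplyCollect gathers every group's
    # output back to the driver as a single data.frame, so no schema is needed.
    result <- gapplyCollect(
      groupBy(df, "waiting"),
      function(key, x) {
        y <- data.frame(key, mean(x$eruptions))
        colnames(y) <- c("waiting", "mean_eruptions")
        y
      })

    head(result)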