GitHub user felixcheung commented on a diff in the pull request: https://github.com/apache/spark/pull/22455#discussion_r223197875 --- Diff: R/pkg/R/DataFrame.R --- @@ -246,30 +248,38 @@ setMethod("showDF", #' @note show(SparkDataFrame) since 1.4.0 setMethod("show", "SparkDataFrame", function(object) { - allConf <- sparkR.conf() - if (!is.null(allConf[["spark.sql.repl.eagerEval.enabled"]]) && - identical(allConf[["spark.sql.repl.eagerEval.enabled"]], "true")) { - argsList <- list() - argsList$x <- object - if (!is.null(allConf[["spark.sql.repl.eagerEval.maxNumRows"]])) { - numRows <- as.numeric(allConf[["spark.sql.repl.eagerEval.maxNumRows"]]) - if (numRows > 0) { - argsList$numRows <- numRows + showFunc <- getOption("sparkr.SparkDataFrame.base_show_func") --- End diff -- could we consider leaving the print/show option out? I'd like to get eager compute to work even in the basic SparkR / R shell
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org