imback82 commented on a change in pull request #30815:
URL: https://github.com/apache/spark/pull/30815#discussion_r544823257
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
##########
@@ -88,12 +88,24 @@ class CacheManager extends Logging with AdaptiveSparkPlanHelper {
query: Dataset[_],
tableName: Option[String] = None,
storageLevel: StorageLevel = MEMORY_AND_DISK): Unit = {
- val planToCache = query.logicalPlan
+ cacheQueryWithLogicalPlan(query.sparkSession, query.logicalPlan, tableName, storageLevel)
+ }
+
+ /**
+ * Caches the data produced by the given [[LogicalPlan]].
+ * Unlike `RDD.cache()`, the default storage level is set to be `MEMORY_AND_DISK` because
+ * recomputing the in-memory columnar representation of the underlying table is expensive.
+ */
+ def cacheQueryWithLogicalPlan(
+ spark: SparkSession,
+ planToCache: LogicalPlan,
+ tableName: Option[String] = None,
Review comment:
I think https://github.com/apache/spark/pull/30815/files#r544822268
should work. Let me update this PR. Thanks!
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]