This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 66c5cd2 [SPARK-27151][SQL] ClearCacheCommand extends IgnoreCachedData
to avoid plan node copies
66c5cd2 is described below
commit 66c5cd2d9c27c992ad64261919d94f074c391aba
Author: Takeshi Yamamuro <[email protected]>
AuthorDate: Thu Mar 14 11:36:16 2019 -0700
[SPARK-27151][SQL] ClearCacheCommand extends IgnoreCachedData to avoid plan
node copies
## What changes were proposed in this pull request?
In SPARK-27011, we introduced `IgnoreCachedData` to avoid plan node copies in
`CacheManager`.
Since `ClearCacheCommand` has no argument, it can also extend
`IgnoreCachedData`.
## How was this patch tested?
Pass Jenkins.
Closes #24081 from maropu/SPARK-27011-2.
Authored-by: Takeshi Yamamuro <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
---
.../main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala | 2 +-
.../scala/org/apache/spark/sql/execution/command/SetCommand.scala | 2 +-
.../main/scala/org/apache/spark/sql/execution/command/cache.scala | 7 ++-----
3 files changed, 4 insertions(+), 7 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index c17cf5d..a9a5e6ec 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -297,7 +297,7 @@ class SparkSqlAstBuilder(conf: SQLConf) extends
AstBuilder(conf) {
* Create a [[ClearCacheCommand]] logical plan.
*/
override def visitClearCache(ctx: ClearCacheContext): LogicalPlan =
withOrigin(ctx) {
- ClearCacheCommand()
+ ClearCacheCommand
}
/**
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
index ca25dc5..45c62b4 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/SetCommand.scala
@@ -162,7 +162,7 @@ object SetCommand {
* reset;
* }}}
*/
-case object ResetCommand extends RunnableCommand with IgnoreCachedData with
Logging {
+case object ResetCommand extends RunnableCommand with IgnoreCachedData {
override def run(sparkSession: SparkSession): Seq[Row] = {
sparkSession.sessionState.conf.clear()
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
index 728604a..7b00769 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/cache.scala
@@ -23,7 +23,7 @@ import org.apache.spark.sql.{Dataset, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.{IgnoreCachedData,
LogicalPlan}
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.storage.StorageLevel
@@ -83,13 +83,10 @@ case class UncacheTableCommand(
/**
* Clear all cached data from the in-memory cache.
*/
-case class ClearCacheCommand() extends RunnableCommand {
+case object ClearCacheCommand extends RunnableCommand with IgnoreCachedData {
override def run(sparkSession: SparkSession): Seq[Row] = {
sparkSession.catalog.clearCache()
Seq.empty[Row]
}
-
- /** [[org.apache.spark.sql.catalyst.trees.TreeNode.makeCopy()]] does not
support 0-arg ctor. */
- override def makeCopy(newArgs: Array[AnyRef]): ClearCacheCommand =
ClearCacheCommand()
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]