cloud-fan commented on a change in pull request #23539: [SPARK-26617][SQL] Cache manager locks
URL: https://github.com/apache/spark/pull/23539#discussion_r250037457
 
 

 ##########
 File path: sql/core/src/main/scala/org/apache/spark/sql/execution/CacheManager.scala
 ##########
 @@ -160,34 +167,46 @@ class CacheManager extends Logging {
   /**
    * Tries to re-cache all the cache entries that refer to the given plan.
    */
-  def recacheByPlan(spark: SparkSession, plan: LogicalPlan): Unit = writeLock {
+  def recacheByPlan(spark: SparkSession, plan: LogicalPlan): Unit = {
     recacheByCondition(spark, _.find(_.sameResult(plan)).isDefined)
   }
 
   private def recacheByCondition(
       spark: SparkSession,
       condition: LogicalPlan => Boolean,
       clearCache: Boolean = true): Unit = {
-    val it = cachedData.iterator()
     val needToRecache = scala.collection.mutable.ArrayBuffer.empty[CachedData]
-    while (it.hasNext) {
-      val cd = it.next()
-      if (condition(cd.plan)) {
-        if (clearCache) {
-          cd.cachedRepresentation.cacheBuilder.clearCache()
+    writeLock {
+      val it = cachedData.iterator()
+      while (it.hasNext) {
+        val cd = it.next()
+        if (condition(cd.plan)) {
+          // Remove the cache entry before we create a new one, so that we can have a different
+          // physical plan.
+          needToRecache += cd
+          it.remove()
+        }
+      }
+    }
+    val recomputedPlans = needToRecache.map { cd =>
+      if (clearCache) {
+        cd.cachedRepresentation.cacheBuilder.clearCache()
+      }
+      val plan = spark.sessionState.executePlan(cd.plan).executedPlan
+      val newCache = InMemoryRelation(
+        cacheBuilder = cd.cachedRepresentation.cacheBuilder.withCachedPlan(plan),
+        logicalPlan = cd.plan)
+      cd.copy(cachedRepresentation = newCache)
+    }
+    writeLock {
 
 Review comment:
   instead of holding the lock around the whole loop, we can take the lock per iteration, so the expensive re-planning runs outside it:
   ```
   needToRecache.foreach { cd =>
     if (clearCache) {
       cd.cachedRepresentation.cacheBuilder.clearCache()
     }
     val plan = spark.sessionState.executePlan(cd.plan).executedPlan
     val newCache = InMemoryRelation(
       cacheBuilder = cd.cachedRepresentation.cacheBuilder.withCachedPlan(plan),
       logicalPlan = cd.plan)
     writeLock {
       cachedData.add(cd.copy(cachedRepresentation = newCache))
     }
   }
   ```
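  For context, `writeLock` is just a small helper that runs a block while holding the lock guarding the cache state. A minimal sketch of what such a helper could look like, assuming a `java.util.concurrent.locks.ReentrantReadWriteLock` protects `cachedData` (names and structure here are illustrative, not necessarily what this PR uses):
  ```
  import java.util.concurrent.locks.ReentrantReadWriteLock

  object CacheLockSketch {
    // Read-write lock guarding the shared cache state (hypothetical field name).
    private val cacheLock = new ReentrantReadWriteLock

    // Run `f` while holding the write lock, releasing it even if `f` throws.
    def writeLock[A](f: => A): A = {
      val lock = cacheLock.writeLock()
      lock.lock()
      try f finally lock.unlock()
    }

    // Run `f` while holding the read lock, for lookups that don't mutate state.
    def readLock[A](f: => A): A = {
      val lock = cacheLock.readLock()
      lock.lock()
      try f finally lock.unlock()
    }
  }
  ```
  With per-iteration locking, the expensive `spark.sessionState.executePlan(...)` call runs outside the write lock, so other threads can still read or update the cache while a single entry is being re-planned.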
