Github user dbtsai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21322#discussion_r189187178
  
    --- Diff: 
core/src/main/scala/org/apache/spark/storage/memory/MemoryStore.scala ---
    @@ -384,14 +385,37 @@ private[spark] class MemoryStore(
         }
       }
     
    +  private def maybeReleaseResources(entry: MemoryEntry[_]): Unit = {
    +    entry match {
    +      case SerializedMemoryEntry(buffer, _, _) => buffer.dispose()
    +      case DeserializedMemoryEntry(values: Array[Any], _, _) => 
maybeCloseValues(values)
    +      case _ =>
    +    }
    +  }
    +
    +  private def maybeCloseValues(values: Array[Any]): Unit = {
    +    values.foreach {
    +      case closable: AutoCloseable =>
    +        safelyCloseValue(closable)
    +      case _ =>
    +    }
    +  }
    +
    +  private def safelyCloseValue(closable: AutoCloseable): Unit = {
    +    try {
    +      closable.close()
    +    } catch {
    +      case ex: Exception => logWarning(s"Failed to close AutoClosable 
$closable", ex)
    +    }
    +  }
    +
       def remove(blockId: BlockId): Boolean = memoryManager.synchronized {
         val entry = entries.synchronized {
           entries.remove(blockId)
         }
         if (entry != null) {
    -      entry match {
    -        case SerializedMemoryEntry(buffer, _, _) => buffer.dispose()
    -        case _ =>
    +      if (blockId.isBroadcast) {
    +        maybeReleaseResources(entry)
    --- End diff --
    
    In this case, what happens when the blockId is not a broadcast block? The existing 
clean-up will not be performed.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to