Ngone51 commented on a change in pull request #28911:
URL: https://github.com/apache/spark/pull/28911#discussion_r480094857
##########
File path: core/src/main/scala/org/apache/spark/storage/BlockManager.scala
##########
@@ -120,34 +120,33 @@ private[spark] class ByteBufferBlockData(
private[spark] class HostLocalDirManager(
futureExecutionContext: ExecutionContext,
cacheSize: Int,
- externalBlockStoreClient: ExternalBlockStoreClient,
- host: String,
- externalShuffleServicePort: Int) extends Logging {
+ blockStoreClient: BlockStoreClient) extends Logging {
private val executorIdToLocalDirsCache =
CacheBuilder
.newBuilder()
.maximumSize(cacheSize)
.build[String, Array[String]]()
- private[spark] def getCachedHostLocalDirs()
- : scala.collection.Map[String, Array[String]] =
executorIdToLocalDirsCache.synchronized {
- import scala.collection.JavaConverters._
- return executorIdToLocalDirsCache.asMap().asScala
- }
+ private[spark] def getCachedHostLocalDirs: Map[String, Array[String]] =
+ executorIdToLocalDirsCache.synchronized {
+ executorIdToLocalDirsCache.asMap().asScala.toMap
+ }
private[spark] def getHostLocalDirs(
+ host: String,
+ port: Int,
executorIds: Array[String])(
- callback: Try[java.util.Map[String, Array[String]]] => Unit): Unit = {
+ callback: Try[Map[String, Array[String]]] => Unit): Unit = {
Review comment:
It's required by `fetchMultipleHostLocalBlocks`. Actually, we could
also do the Java-to-Scala map conversion before calling
`fetchMultipleHostLocalBlocks`, while leaving `Try[java.util.Map[String,
Array[String]]] => Unit` unchanged.
But I decided to do the conversion here simply because this class already
imports `scala.collection.JavaConverters._`. It makes no real difference
whether the conversion happens here or there.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]