zhouyejoe commented on a change in pull request #32007:
URL: https://github.com/apache/spark/pull/32007#discussion_r648745059
##########
File path: core/src/main/scala/org/apache/spark/storage/DiskBlockManager.scala
##########
@@ -153,6 +189,59 @@ private[spark] class DiskBlockManager(conf: SparkConf,
deleteFilesOnStop: Boolea
}
}
+ /**
+ * Get the list of configured local dirs storing merged shuffle blocks
created by executors
+ * if push based shuffle is enabled. Note that the files in this directory
will be created
+ * by the external shuffle services. We only create the merge_manager
directories and
+ * subdirectories here because currently the shuffle service doesn't have
permission to
+ * create directories under application local directories.
+ */
+ private def createLocalDirsForMergedShuffleBlocks(conf: SparkConf):
Option[Array[File]] = {
+ if (Utils.isPushBasedShuffleEnabled(conf)) {
+ // Will create the merge_manager directory only if it doesn't exist
under any local dir.
+ val localDirs = Utils.getConfiguredLocalDirs(conf)
+ var mergeDirCreated = false;
+ for (rootDir <- localDirs) {
+ val mergeDir = new File(rootDir, MERGE_MANAGER_DIR)
+ if (mergeDir.exists()) {
+ logDebug(s"Not creating $mergeDir as it already exists")
+ mergeDirCreated = true
+ }
+ }
Review comment:
Updated the code as described above, so there is no longer a need for the for
loop that checks whether any local dir already contains the merge directory.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]