Github user vanzin commented on a diff in the pull request:
https://github.com/apache/spark/pull/21251#discussion_r186566903
--- Diff: core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala ---
@@ -1204,7 +1205,36 @@ private[spark] object SparkSubmitUtils {
/** A nice function to use in tests as well. Values are dummy strings. */
def getModuleDescriptor: DefaultModuleDescriptor =
DefaultModuleDescriptor.newDefaultInstance(
- ModuleRevisionId.newInstance("org.apache.spark",
"spark-submit-parent", "1.0"))
+ // Include UUID in module name, so multiple clients resolving maven
coordinate at the same time
+ // do not modify the same resolution file concurrently.
+ ModuleRevisionId.newInstance("org.apache.spark",
+ s"spark-submit-parent-${UUID.randomUUID.toString}",
+ "1.0"))
+
+ /**
+ * clear ivy resolution from current launch. The resolution file is
usually at
+ * ~/.ivy2/org.apache.spark-spark-submit-parent-$UUID-default.xml,
+ * ~/.ivy2/resolved-org.apache.spark-spark-submit-parent-$UUID-1.0.xml,
and
+ *
~/.ivy2/resolved-org.apache.spark-spark-submit-parent-$UUID-1.0.properties.
+ * Since each launch will have its own resolution files created, delete
them after
+ * each resolution to prevent accumulation of these files in the ivy
cache dir.
+ */
+ private def clearIvyResolutionFiles(
+ mdId: ModuleRevisionId,
+ ivySettings: IvySettings,
+ ivyConfName: String): Unit = {
+ val currentResolutionFiles = Seq[File](
+ new File(ivySettings.getDefaultCache,
--- End diff --
nit: you could construct the `new File(ivySettings.getDefaultCache, ...)` inside the foreach
loop instead of building the full `Seq[File]` up front.
---
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]