AngersZhuuuu commented on a change in pull request #29966:
URL: https://github.com/apache/spark/pull/29966#discussion_r532399083
##########
File path: core/src/main/scala/org/apache/spark/util/DependencyUtils.scala
##########
@@ -25,12 +25,104 @@ import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SecurityManager, SparkConf, SparkException}
+import org.apache.spark.deploy.SparkSubmitUtils
import org.apache.spark.internal.Logging
-import org.apache.spark.util.{MutableURLClassLoader, Utils}
-private[deploy] object DependencyUtils extends Logging {
+private[spark] object DependencyUtils extends Logging {
+
+ def getIvyProperties(): Seq[String] = {
+ Seq(
+ "spark.jars.excludes",
+ "spark.jars.packages",
+ "spark.jars.repositories",
+ "spark.jars.ivy",
+ "spark.jars.ivySettings"
+ ).map(sys.props.get(_).orNull)
+ }
+
+
+ private def parseURLQueryParameter(queryString: String, queryTag: String): Array[String] = {
+ if (queryString == null || queryString.isEmpty) {
+ Array.empty[String]
+ } else {
+ val mapTokens = queryString.split("&")
+ assert(mapTokens.forall(_.split("=").length == 2)
+ , "Invalid URI query string: [ " + queryString + " ]")
+ mapTokens.map(_.split("=")).map(kv => (kv(0), kv(1))).filter(_._1 == queryTag).map(_._2)
+ }
+ }
+
+ /**
+ * Parse excluded list in ivy URL. When download ivy URL jar, Spark won't download transitive jar
+ * in excluded list.
+ *
+ * @param queryString Ivy URI query part string.
+ * @return Exclude list which contains grape parameters of exclude.
+ * Example: Input: exclude=org.mortbay.jetty:jetty,org.eclipse.jetty:jetty-http
+ * Output: [org.mortbay.jetty:jetty, org.eclipse.jetty:jetty-http]
+ */
+ private def parseExcludeList(queryString: String): String = {
+ parseURLQueryParameter(queryString, "exclude")
+ .flatMap { excludeString =>
+ val excludes: Array[String] = excludeString.split(",")
+ assert(excludes.forall(_.split(":").length == 2),
Review comment:
> ditto: `IllegalArgumentException`? btw, hive has the same behaviour with this?
https://github.com/apache/hive/blob/aed7c86cdd59f0b2a4979633fbd191d451f2fd75/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java#L143-L146
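
A minimal sketch of what swapping the `assert` for an explicit check could look like in `parseExcludeList` (illustration only, not the committed patch; `require` is Scala's built-in check that throws `IllegalArgumentException`):

```scala
// Sketch only: validate the user-supplied exclude list with require (which throws
// IllegalArgumentException) instead of assert, so the check cannot be elided.
private def parseExcludeList(queryString: String): String = {
  parseURLQueryParameter(queryString, "exclude")
    .flatMap { excludeString =>
      val excludes: Array[String] = excludeString.split(",")
      // Unlike assert, require is not removed by -Xdisable-assertions and reports
      // malformed input such as "exclude=jetty" (missing the ":module" part).
      require(excludes.forall(_.split(":").length == 2),
        s"Invalid exclude string: expected 'org:module,org:module,..', found [$excludeString]")
      excludes
    }.mkString(",")
}
```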
##########
File path: core/src/main/scala/org/apache/spark/util/DependencyUtils.scala
##########
@@ -25,12 +25,104 @@ import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SecurityManager, SparkConf, SparkException}
+import org.apache.spark.deploy.SparkSubmitUtils
import org.apache.spark.internal.Logging
-import org.apache.spark.util.{MutableURLClassLoader, Utils}
-private[deploy] object DependencyUtils extends Logging {
+private[spark] object DependencyUtils extends Logging {
+
+ def getIvyProperties(): Seq[String] = {
+ Seq(
+ "spark.jars.excludes",
+ "spark.jars.packages",
+ "spark.jars.repositories",
+ "spark.jars.ivy",
+ "spark.jars.ivySettings"
+ ).map(sys.props.get(_).orNull)
+ }
+
+
+ private def parseURLQueryParameter(queryString: String, queryTag: String): Array[String] = {
+ if (queryString == null || queryString.isEmpty) {
+ Array.empty[String]
+ } else {
+ val mapTokens = queryString.split("&")
+ assert(mapTokens.forall(_.split("=").length == 2)
Review comment:
> Really `assert`? Rather, `IllegalArgumentException` because the query string depends on user's input?

Hive's logic:
https://github.com/apache/hive/blob/aed7c86cdd59f0b2a4979633fbd191d451f2fd75/ql/src/java/org/apache/hadoop/hive/ql/util/DependencyResolver.java#L116-L119
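
For comparison, a rough sketch of the query-string check raising `IllegalArgumentException` instead of `assert` (an assumed shape of the change, not the final code):

```scala
// Sketch only: fail malformed user input with IllegalArgumentException rather than assert.
private def parseURLQueryParameter(queryString: String, queryTag: String): Array[String] = {
  if (queryString == null || queryString.isEmpty) {
    Array.empty[String]
  } else {
    val mapTokens = queryString.split("&")
    if (mapTokens.exists(_.split("=").length != 2)) {
      // Mirrors Hive's DependencyResolver, which rejects a malformed query string outright.
      throw new IllegalArgumentException(s"Invalid URI query string: [$queryString]")
    }
    mapTokens.map(_.split("=")).map(kv => (kv(0), kv(1))).filter(_._1 == queryTag).map(_._2)
  }
}
```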
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]