AngersZhuuuu commented on a change in pull request #29881:
URL: https://github.com/apache/spark/pull/29881#discussion_r503028007
##########
File path: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
##########
@@ -396,6 +417,93 @@ private[spark] object HiveUtils extends Logging {
config = configurations,
barrierPrefixes = hiveMetastoreBarrierPrefixes,
sharedPrefixes = hiveMetastoreSharedPrefixes)
+ } else if (hiveMetastoreJars == "path") {
+
+ def addLocalHiveJars(file: File): Seq[URL] = {
+ if (file.getName == "*") {
+ val files = file.getParentFile.listFiles()
+ if (files == null) {
+ logWarning(s"Hive jar path '${file.getPath}' does not exist.")
+ Nil
+ } else {
+
files.filter(_.getName.toLowerCase(Locale.ROOT).endsWith(".jar")).map(_.toURL).toSeq
+ }
+ } else {
+ file.toURL :: Nil
+ }
+ }
+
+ def checkRemoteHiveJars(path: String): Seq[URL] = {
+ try {
+ val hadoopPath = new Path(path)
+ val fs = hadoopPath.getFileSystem(hadoopConf)
+ if (hadoopPath.getName == "*") {
+ val parent = hadoopPath.getParent
+ if (!fs.exists(parent)) {
+ logWarning(s"Hive Jar ${path} does not exist.")
+ Nil
+ } else if (!fs.getFileStatus(parent).isDirectory) {
+ logWarning(s"Hive Jar ${parent} is not a directory.")
+ Nil
+ } else {
+ fs.listStatus(parent).map(_.getPath.toUri.toURL)
+ }
+ } else {
+ if (!fs.exists(hadoopPath)) {
+ logWarning(s"Hive Jar ${path} does not exist.")
+ Nil
+ } else if (fs.getFileStatus(hadoopPath).isDirectory) {
+ logWarning(s"Hive Jar ${path} not allow directory without `*`")
+ Nil
+ } else {
+ // Since tar/tar.gz file we can't know it's final path yet, not
support it
+ hadoopPath.toUri.toURL :: Nil
Review comment:
> How are these jars be downloaded eventually? we'll need to call
`FileSystem` API for that later right?
URLClassLoader will handle this; you can see my previous commit — I used
`Utils.fetchFile` to download, but now I have removed that.
##########
File path: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
##########
@@ -88,12 +90,23 @@ private[spark] object HiveUtils extends Logging {
| <code>${builtinHiveVersion}</code> or not defined.
| 2. "maven"
| Use Hive jars of specified version downloaded from Maven
repositories.
- | 3. A classpath in the standard format for both Hive and Hadoop.
+ | 3. "path"
+ | Use Hive jars configured by `spark.sql.hive.metastore.jars.path` in
comma separated format
+ | support both local or remote paths.
+ | 4. A classpath in the standard format for both Hive and Hadoop, we
should always
Review comment:
> nit: `we should always` -> `it should always`?
Yea
##########
File path: sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
##########
@@ -88,12 +90,23 @@ private[spark] object HiveUtils extends Logging {
| <code>${builtinHiveVersion}</code> or not defined.
| 2. "maven"
| Use Hive jars of specified version downloaded from Maven
repositories.
- | 3. A classpath in the standard format for both Hive and Hadoop.
+ | 3. "path"
+ | Use Hive jars configured by `spark.sql.hive.metastore.jars.path` in
comma separated format
+ | support both local or remote paths.
+ | 4. A classpath in the standard format for both Hive and Hadoop, we
should always
+ | be fully qualified URL to indicate other file systems.
""".stripMargin)
.version("1.4.0")
.stringConf
.createWithDefault("builtin")
+ val HIVE_METASTORE_JARS_PATH =
buildStaticConf("spark.sql.hive.metastore.jars.path")
+ .doc(s"Comma separated path of Hive jars, both support local and remote
paths." +
Review comment:
> nit: `both support` -> `support both`.
Done
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]