Repository: spark
Updated Branches:
  refs/heads/master 68f995714 -> 84a27916a


[SPARK-9885] [SQL] Also pass barrierPrefixes and sharedPrefixes to 
IsolatedClientLoader when hiveMetastoreJars is set to maven.

https://issues.apache.org/jira/browse/SPARK-9885

cc marmbrus liancheng

Author: Yin Huai <[email protected]>

Closes #8158 from yhuai/classloaderMaven.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/84a27916
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/84a27916
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/84a27916

Branch: refs/heads/master
Commit: 84a27916a62980c8fcb0977c3a7fdb73c0bd5812
Parents: 68f9957
Author: Yin Huai <[email protected]>
Authored: Thu Aug 13 15:08:57 2015 +0800
Committer: Cheng Lian <[email protected]>
Committed: Thu Aug 13 15:08:57 2015 +0800

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/hive/HiveContext.scala    |  6 +++++-
 .../spark/sql/hive/client/IsolatedClientLoader.scala     | 11 +++++++++--
 2 files changed, 14 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/84a27916/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index f17177a..1776264 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -231,7 +231,11 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
       // TODO: Support for loading the jars from an already downloaded location.
       logInfo(
         s"Initializing HiveMetastoreConnection version $hiveMetastoreVersion using maven.")
-      IsolatedClientLoader.forVersion(hiveMetastoreVersion, allConfig)
+      IsolatedClientLoader.forVersion(
+        version = hiveMetastoreVersion,
+        config = allConfig,
+        barrierPrefixes = hiveMetastoreBarrierPrefixes,
+        sharedPrefixes = hiveMetastoreSharedPrefixes)
     } else {
       // Convert to files and expand any directories.
       val jars =

http://git-wip-us.apache.org/repos/asf/spark/blob/84a27916/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
index a7d5a99..7856037 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
@@ -42,11 +42,18 @@ private[hive] object IsolatedClientLoader {
   def forVersion(
       version: String,
       config: Map[String, String] = Map.empty,
-      ivyPath: Option[String] = None): IsolatedClientLoader = synchronized {
+      ivyPath: Option[String] = None,
+      sharedPrefixes: Seq[String] = Seq.empty,
+      barrierPrefixes: Seq[String] = Seq.empty): IsolatedClientLoader = synchronized {
     val resolvedVersion = hiveVersion(version)
     val files = resolvedVersions.getOrElseUpdate(resolvedVersion,
       downloadVersion(resolvedVersion, ivyPath))
-    new IsolatedClientLoader(hiveVersion(version), files, config)
+    new IsolatedClientLoader(
+      version = hiveVersion(version),
+      execJars = files,
+      config = config,
+      sharedPrefixes = sharedPrefixes,
+      barrierPrefixes = barrierPrefixes)
   }
 
   def hiveVersion(version: String): HiveVersion = version match {


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to