Repository: spark
Updated Branches:
  refs/heads/master a56516fc9 -> 9d1181776


[SPARK-8607] SparkR -- jars not being added to application classpath correctly

Add `getStaticClass` method in SparkR's `RBackendHandler`

This is a fix for the problem referenced in 
[SPARK-5185](https://issues.apache.org/jira/browse/SPARK-5185).

cc shivaram

Author: cafreeman <cfree...@alteryx.com>

Closes #7001 from cafreeman/branch-1.4 and squashes the following commits:

8f81194 [cafreeman] Add missing license
31aedcf [cafreeman] Refactor test to call an external R script
2c22073 [cafreeman] Merge branch 'branch-1.4' of github.com:apache/spark into 
branch-1.4
0bea809 [cafreeman] Fixed relative path issue and added smaller JAR
ee25e60 [cafreeman] Merge branch 'branch-1.4' of github.com:apache/spark into 
branch-1.4
9a5c362 [cafreeman] test for including JAR when launching sparkContext
9101223 [cafreeman] Merge branch 'branch-1.4' of github.com:apache/spark into 
branch-1.4
5a80844 [cafreeman] Fix style nits
7c6bd0c [cafreeman] [SPARK-8607] SparkR

(cherry picked from commit 2579948bf5d89ac2d822ace605a6a4afce5258d6)
Signed-off-by: Shivaram Venkataraman <shiva...@cs.berkeley.edu>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9d118177
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9d118177
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9d118177

Branch: refs/heads/master
Commit: 9d11817765e2817b11b73c61bae3b32c9f119cfd
Parents: a56516f
Author: cafreeman <cfree...@alteryx.com>
Authored: Fri Jun 26 17:06:02 2015 -0700
Committer: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Committed: Fri Jun 26 17:06:16 2015 -0700

----------------------------------------------------------------------
 .../inst/test_support/sparktestjar_2.10-1.0.jar | Bin 0 -> 2886 bytes
 R/pkg/inst/tests/jarTest.R                      |  32 ++++++++++++++++
 R/pkg/inst/tests/test_includeJAR.R              |  37 +++++++++++++++++++
 .../apache/spark/api/r/RBackendHandler.scala    |  17 ++++++++-
 4 files changed, 85 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9d118177/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar
----------------------------------------------------------------------
diff --git a/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar 
b/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar
new file mode 100644
index 0000000..1d5c2af
Binary files /dev/null and b/R/pkg/inst/test_support/sparktestjar_2.10-1.0.jar 
differ

http://git-wip-us.apache.org/repos/asf/spark/blob/9d118177/R/pkg/inst/tests/jarTest.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/jarTest.R b/R/pkg/inst/tests/jarTest.R
new file mode 100644
index 0000000..d68bb20
--- /dev/null
+++ b/R/pkg/inst/tests/jarTest.R
@@ -0,0 +1,32 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+library(SparkR)
+
+sc <- sparkR.init()
+
+helloTest <- SparkR:::callJStatic("sparkR.test.hello",
+                                  "helloWorld",
+                                  "Dave")
+
+basicFunction <- SparkR:::callJStatic("sparkR.test.basicFunction",
+                                      "addStuff",
+                                      2L,
+                                      2L)
+
+sparkR.stop()
+output <- c(helloTest, basicFunction)
+writeLines(output)

http://git-wip-us.apache.org/repos/asf/spark/blob/9d118177/R/pkg/inst/tests/test_includeJAR.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/test_includeJAR.R 
b/R/pkg/inst/tests/test_includeJAR.R
new file mode 100644
index 0000000..8bc693b
--- /dev/null
+++ b/R/pkg/inst/tests/test_includeJAR.R
@@ -0,0 +1,37 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+context("include an external JAR in SparkContext")
+
+runScript <- function() {
+  sparkHome <- Sys.getenv("SPARK_HOME")
+  jarPath <- paste("--jars",
+                   shQuote(file.path(sparkHome, 
"R/lib/SparkR/test_support/sparktestjar_2.10-1.0.jar")))
+  scriptPath <- file.path(sparkHome, "R/lib/SparkR/tests/jarTest.R")
+  submitPath <- file.path(sparkHome, "bin/spark-submit")
+  res <- system2(command = submitPath,
+                 args = c(jarPath, scriptPath),
+                 stdout = TRUE)
+  tail(res, 2)
+}
+
+test_that("sparkJars tag in SparkContext", {
+  testOutput <- runScript()
+  helloTest <- testOutput[1]
+  expect_true(helloTest == "Hello, Dave")
+  basicFunction <- testOutput[2]
+  expect_true(basicFunction == 4L)
+})

http://git-wip-us.apache.org/repos/asf/spark/blob/9d118177/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala 
b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
index 2e86984..4b8f7fe 100644
--- a/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
+++ b/core/src/main/scala/org/apache/spark/api/r/RBackendHandler.scala
@@ -88,6 +88,21 @@ private[r] class RBackendHandler(server: RBackend)
     ctx.close()
   }
 
+  // Looks up a class given a class name. This function first checks the
+  // current class loader and, if the class is not found, falls back to the
+  // thread context class loader. Addresses [SPARK-5185].
+  def getStaticClass(objId: String): Class[_] = {
+    try {
+      val clsCurrent = Class.forName(objId)
+      clsCurrent
+    } catch {
+      // Use contextLoader if we can't find the JAR in the system class loader
+      case e: ClassNotFoundException =>
+        val clsContext = Class.forName(objId, true, 
Thread.currentThread().getContextClassLoader)
+        clsContext
+      }
+    }
+
   def handleMethodCall(
       isStatic: Boolean,
       objId: String,
@@ -98,7 +113,7 @@ private[r] class RBackendHandler(server: RBackend)
     var obj: Object = null
     try {
       val cls = if (isStatic) {
-        Class.forName(objId)
+        getStaticClass(objId)
       } else {
         JVMObjectTracker.get(objId) match {
           case None => throw new IllegalArgumentException("Object not found " 
+ objId)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to