Repository: zeppelin
Updated Branches:
  refs/heads/master 1812928bf -> 1e1b95ea1


ZEPPELIN-2893. Pass non spark.* of spark interpreter properties to spark app driver

### What is this PR for?
A straightforward change that allows passing non-`spark.*` properties to the Spark app driver. See the discussion here:
http://apache-zeppelin-users-incubating-mailing-list.75479.x6.nabble.com/Setting-spark-config-properties-in-Zeppelin-0-7-2-td6143.html

### What type of PR is it?
[Bug Fix]

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-2893

### How should this be tested?
A unit test is added.

### Questions:
* Do the license files need an update? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zjf...@apache.org>

Closes #2561 from zjffdu/ZEPPELIN-2893 and squashes the following commits:

1342527 [Jeff Zhang] ZEPPELIN-2893. Pass non spark.* of spark interpreter properties to spark app driver


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/1e1b95ea
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/1e1b95ea
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/1e1b95ea

Branch: refs/heads/master
Commit: 1e1b95ea1dc5db408773263837c9cbbd6f66b288
Parents: 1812928
Author: Jeff Zhang <zjf...@apache.org>
Authored: Sun Sep 3 10:39:37 2017 +0800
Committer: Jeff Zhang <zjf...@apache.org>
Committed: Thu Sep 7 06:35:31 2017 +0800

----------------------------------------------------------------------
 docs/interpreter/spark.md                       |  7 +++++
 .../apache/zeppelin/spark/SparkInterpreter.java | 27 +++++++++++++++-----
 .../zeppelin/spark/SparkInterpreterTest.java    |  9 ++++++-
 3 files changed, 36 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1e1b95ea/docs/interpreter/spark.md
----------------------------------------------------------------------
diff --git a/docs/interpreter/spark.md b/docs/interpreter/spark.md
index 8ba9247..780c60a 100644
--- a/docs/interpreter/spark.md
+++ b/docs/interpreter/spark.md
@@ -196,6 +196,13 @@ Staring from 0.6.1 SparkSession is available as variable `spark` when you are us
 
 <a name="dependencyloading"> </a>
 
+### How to pass properties to SparkConf
+
+There are two kinds of properties that can be passed to `SparkConf`:
+
+ * Standard spark properties (prefixed with `spark.`), e.g. `spark.executor.memory` is passed to `SparkConf` as-is.
+ * Non-standard spark properties (prefixed with `zeppelin.spark.`), e.g. for `zeppelin.spark.property_1`, the key `property_1` is passed to `SparkConf`.
+
 ## Dependency Management
 There are two ways to load external libraries in Spark interpreter. First is using interpreter setting menu and second is loading Spark properties.
 

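To make the convention above concrete, here is a minimal, hypothetical driver-side check. The class name, the local master, and the `property_1`/`value_1` pair are illustrative (the pair mirrors the unit test in this commit); only the key mapping itself comes from this change: a property set as `zeppelin.spark.property_1` in the interpreter setting should surface on the driver's `SparkConf` as plain `property_1`.

```java
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

// Illustrative sketch only: "property_1" below stands for what
// "zeppelin.spark.property_1" becomes after the prefix is stripped.
public class NonStandardPropertyDemo {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf()
        .setMaster("local[1]")          // assumed local master for the demo
        .setAppName("prefix-demo")
        .set("property_1", "value_1");  // post-mapping key, as this commit produces

    try (JavaSparkContext sc = new JavaSparkContext(conf)) {
      // SparkConf.get(String) throws NoSuchElementException for absent keys,
      // which is what the new unit test in this commit relies on.
      System.out.println(sc.getConf().get("property_1")); // prints "value_1"
    }
  }
}
```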
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1e1b95ea/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index df41014..670314b 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -381,9 +381,16 @@ public class SparkInterpreter extends Interpreter {
     for (Object k : intpProperty.keySet()) {
       String key = (String) k;
       String val = toString(intpProperty.get(key));
-      if (key.startsWith("spark.") && !val.trim().isEmpty()) {
-        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
-        conf.set(key, val);
+      if (!val.trim().isEmpty()) {
+        if (key.startsWith("spark.")) {
+          logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
+          conf.set(key, val);
+        }
+        if (key.startsWith("zeppelin.spark.")) {
+          String sparkPropertyKey = key.substring("zeppelin.spark.".length());
+          logger.debug(String.format("SparkConf: key = [%s], value = [%s]", sparkPropertyKey, val));
+          conf.set(sparkPropertyKey, val);
+        }
       }
     }
 
@@ -509,9 +516,17 @@ public class SparkInterpreter extends Interpreter {
     for (Object k : intpProperty.keySet()) {
       String key = (String) k;
       String val = toString(intpProperty.get(key));
-      if (key.startsWith("spark.") && !val.trim().isEmpty()) {
-        logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
-        conf.set(key, val);
+      if (!val.trim().isEmpty()) {
+        if (key.startsWith("spark.")) {
+          logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
+          conf.set(key, val);
+        }
+
+        if (key.startsWith("zeppelin.spark.")) {
+          String sparkPropertyKey = key.substring("zeppelin.spark.".length());
+          logger.debug(String.format("SparkConf: key = [%s], value = [%s]", sparkPropertyKey, val));
+          conf.set(sparkPropertyKey, val);
+        }
       }
     }
     setupConfForPySpark(conf);

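Read outside the diff, the loop both hunks above modify amounts to the following self-contained sketch. The class name and sample values are illustrative (the key/value pair mirrors the test below); the mapping logic itself follows the changed code:

```java
import java.util.Properties;

import org.apache.spark.SparkConf;

// Standalone rendering of the property-mapping logic changed above.
public class SparkConfMappingSketch {
  public static void main(String[] args) {
    Properties p = new Properties();
    p.setProperty("spark.executor.memory", "2g");          // standard: key kept as-is
    p.setProperty("zeppelin.spark.property_1", "value_1"); // non-standard: prefix stripped

    SparkConf conf = new SparkConf();
    for (String key : p.stringPropertyNames()) {
      String val = p.getProperty(key);
      if (val.trim().isEmpty()) {
        continue; // empty values are skipped, as in the interpreter
      }
      if (key.startsWith("spark.")) {
        conf.set(key, val);
      }
      if (key.startsWith("zeppelin.spark.")) {
        // "zeppelin.spark.property_1" lands in SparkConf as "property_1"
        conf.set(key.substring("zeppelin.spark.".length()), val);
      }
    }

    System.out.println(conf.get("spark.executor.memory")); // 2g
    System.out.println(conf.get("property_1"));            // value_1
  }
}
```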
http://git-wip-us.apache.org/repos/asf/zeppelin/blob/1e1b95ea/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index 3a31e5d..a939942 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -78,7 +78,7 @@ public class SparkInterpreterTest {
     p.setProperty("zeppelin.spark.maxResult", "1000");
     p.setProperty("zeppelin.spark.importImplicit", "true");
     p.setProperty("zeppelin.dep.localrepo", tmpDir.newFolder().getAbsolutePath());
-
+    p.setProperty("zeppelin.spark.property_1", "value_1");
     return p;
   }
 
@@ -152,6 +152,13 @@ public class SparkInterpreterTest {
   }
 
   @Test
+  public void testNonStandardSparkProperties() throws IOException {
+    // throw NoSuchElementException if no such property is found
+    InterpreterResult result = repl.interpret("sc.getConf.get(\"property_1\")", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+  }
+
+  @Test
   public void testNextLineInvocation() {
     assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("\"123\"\n.toInt", context).code());
   }
