Repository: zeppelin
Updated Branches:
  refs/heads/branch-0.6 6ad31609f -> f2b46fedb


[ZEPPELIN-605] Add support for Scala 2.11

Enable Zeppelin to be built with both Scala 2.10
and Scala 2.11, mostly to start supporting interpreters
that are moving to Scala 2.11 only such as Spark.

Before testing this PR, one would need to [build Spark 1.6.1 for example with 
Scala 
2.11](http://spark.apache.org/docs/latest/building-spark.html#building-for-scala-211)
 and [build Flink 1.0 with Scala 
2.11](https://ci.apache.org/projects/flink/flink-docs-master/setup/building.html#scala-versions)

Author: Luciano Resende <[email protected]>
Author: Lee moon soo <[email protected]>

Closes #747 from lresende/scala-210-211 and squashes the following commits:

b9bdf86 [Luciano Resende] Properly invoke createTempDir from spark utils
c208e69 [Luciano Resende] Fix class reference
87f46de [Luciano Resende] Force build
6e5e5ad [Luciano Resende] Refactor utility methods to helper class
4e2237a [Luciano Resende] Update readme to use profile to build scala 2.11 and 
match CI
dd79443 [Luciano Resende] Minor formatting change to force build
de4fc10 [Luciano Resende] Minor change to force build
9194218 [Lee moon soo] initialize imain
cbf84c7 [Luciano Resende] Force Scala 2.11 profile to be called
98790a6 [Luciano Resende] Remove obsolete/commented config
6e4f7b0 [Luciano Resende] Force scala-library dependency version based on scala
a3d0525 [Luciano Resende] Fix new code to support both scala versions
e068593 [Luciano Resende] Fix pom.xml merge conflict
736d055 [Lee moon soo] make binary built with scala 2.11 work with spark_2.10 
binary
74d8a62 [Luciano Resende] Force close
9f5d2a2 [Lee moon soo] Remove unused methods
fc9e8a0 [Lee moon soo] Update ignite interpreter
6d3e7e2 [Lee moon soo] Update FlinkInterpreter
6b9ff1d [Lee moon soo] SparkContext sharing seems not working in scala 2.11, 
disable the test
9424769 [Lee moon soo] style
2ec51a3 [Lee moon soo] Fix reflection
c999a2d [Lee moon soo] fix style
dfe6e83 [Lee moon soo] Fix reflection around HttpServer and createTempDir
222e4e7 [Lee moon soo] Fix reflection on creating SparkCommandLine
112ae7d [Lee moon soo] Fix some reflections
b9e0e1e [Lee moon soo] scala 2.11 support for spark interpreter
c88348d [Lee moon soo] Initial scala-210, 211 support in the single binary
5c47d9a [Luciano Resende] [ZEPPELIN-605] Rewrite Spark interpreter based on 
Scala 2.11 support
a73b68d [Luciano Resende] [ZEPPELIN-605] Enable Scala 2.11 REPL support for 
Spark Interpreter
175be7a [Luciano Resende] [ZEPPELIN-605] Add Scala 2.11 build profile
82eaefa [Luciano Resende] [ZEPPELIN-605] Add support for Scala 2.11

(cherry picked from commit bd714c2b96d28b9b6e1b2c71431ace99e5e963ec)
Signed-off-by: Lee moon soo <[email protected]>


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/f2b46fed
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/f2b46fed
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/f2b46fed

Branch: refs/heads/branch-0.6
Commit: f2b46fedbd0fd030aad86b45f3e526c753b8a60c
Parents: 6ad3160
Author: Luciano Resende <[email protected]>
Authored: Wed Jul 13 10:39:18 2016 -0700
Committer: Lee moon soo <[email protected]>
Committed: Sat Jul 16 06:09:31 2016 +0900

----------------------------------------------------------------------
 .travis.yml                                     |   8 +-
 README.md                                       |   8 +
 cassandra/pom.xml                               |   4 +-
 flink/pom.xml                                   |  31 +-
 .../apache/zeppelin/flink/FlinkInterpreter.java |  30 +-
 ignite/pom.xml                                  |  10 +-
 .../zeppelin/ignite/IgniteInterpreter.java      |  18 +-
 pom.xml                                         |  62 +++-
 r/pom.xml                                       |  33 +-
 scalding/pom.xml                                |  16 +-
 spark-dependencies/pom.xml                      |  10 +-
 spark/pom.xml                                   |  10 +-
 .../apache/zeppelin/spark/DepInterpreter.java   |  96 ++++--
 .../zeppelin/spark/PySparkInterpreter.java      |   2 -
 .../apache/zeppelin/spark/SparkInterpreter.java | 334 ++++++++++++++-----
 .../zeppelin/spark/SparkSqlInterpreter.java     |   2 -
 .../org/apache/zeppelin/spark/SparkVersion.java |   6 +-
 .../java/org/apache/zeppelin/spark/Utils.java   |  92 +++++
 .../spark/dep/SparkDependencyResolver.java      |  28 +-
 .../zeppelin/spark/SparkInterpreterTest.java    |  24 +-
 .../spark/dep/SparkDependencyResolverTest.java  |   1 -
 zeppelin-display/pom.xml                        |  27 +-
 zeppelin-distribution/pom.xml                   |  45 +++
 zeppelin-server/pom.xml                         |  46 ++-
 .../zeppelin/rest/AbstractTestRestApi.java      |  27 ++
 25 files changed, 734 insertions(+), 236 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index 7fa8e15..12e10b9 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,9 +33,13 @@ addons:
 
 matrix:
   include:
-    # Test all modules
+    # Test all modules with scala 2.10
     - jdk: "oraclejdk7"
-      env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr 
-Phadoop-2.3 -Ppyspark -Psparkr -Pscalding" BUILD_FLAG="package -Pbuild-distr" 
TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS=""
+      env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr 
-Phadoop-2.3 -Ppyspark -Psparkr -Pscalding" BUILD_FLAG="package -Dscala-2.10 
-Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" 
TEST_PROJECTS="-Dpython.test.exclude=''"
+
+    # Test all modules with scala 2.11
+    - jdk: "oraclejdk7"
+      env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr 
-Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pscala-2.11" BUILD_FLAG="package 
-Dscala-2.11 -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" 
TEST_PROJECTS="-Dpython.test.exclude=''"
 
     # Test spark module for 1.5.2
     - jdk: "oraclejdk7"

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
index 131463b..045e907 100644
--- a/README.md
+++ b/README.md
@@ -259,6 +259,14 @@ And browse [localhost:8080](localhost:8080) in your 
browser.
 
 For configuration details check __`./conf`__ subdirectory.
 
+### Building for Scala 2.11
+
+To produce a Zeppelin package compiled with Scala 2.11, use the -Pscala-2.11 
profile:
+
+```
+mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -Ppyspark -Pscala-2.11 
-DskipTests clean install
+```
+
 ### Package
 To package the final distribution including the compressed archive, run:
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/cassandra/pom.xml
----------------------------------------------------------------------
diff --git a/cassandra/pom.xml b/cassandra/pom.xml
index 80876e2..8eda839 100644
--- a/cassandra/pom.xml
+++ b/cassandra/pom.xml
@@ -38,14 +38,11 @@
         <cassandra.driver.version>3.0.1</cassandra.driver.version>
         <snappy.version>1.0.5.4</snappy.version>
         <lz4.version>1.3.0</lz4.version>
-        <scala.version>2.10.4</scala.version>
-        <scala.binary.version>2.10</scala.binary.version>
         <commons-lang.version>3.3.2</commons-lang.version>
         <scalate.version>1.7.1</scalate.version>
         <cassandra.guava.version>16.0.1</cassandra.guava.version>
 
         <!--TEST-->
-        <scalatest.version>2.2.4</scalatest.version>
         <junit.version>4.12</junit.version>
         <achilles.version>3.2.4-Zeppelin</achilles.version>
         <assertj.version>1.7.0</assertj.version>
@@ -173,6 +170,7 @@
             <plugin>
                 <groupId>org.scala-tools</groupId>
                 <artifactId>maven-scala-plugin</artifactId>
+                <version>2.15.2</version>
                 <executions>
                     <execution>
                         <id>compile</id>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/flink/pom.xml
----------------------------------------------------------------------
diff --git a/flink/pom.xml b/flink/pom.xml
index ca5d3cd..ea8421f 100644
--- a/flink/pom.xml
+++ b/flink/pom.xml
@@ -37,8 +37,6 @@
   <properties>
     <flink.version>1.0.3</flink.version>
     <flink.akka.version>2.3.7</flink.akka.version>
-    <flink.scala.binary.version>2.10</flink.scala.binary.version>
-    <flink.scala.version>2.10.4</flink.scala.version>
     <scala.macros.version>2.0.1</scala.macros.version>
   </properties>
 
@@ -73,68 +71,71 @@
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-clients_${flink.scala.binary.version}</artifactId>
+      <artifactId>flink-clients_${scala.binary.version}</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-runtime_${flink.scala.binary.version}</artifactId>
+      <artifactId>flink-runtime_${scala.binary.version}</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-scala_${flink.scala.binary.version}</artifactId>
+      <artifactId>flink-scala_${scala.binary.version}</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-scala-shell_${flink.scala.binary.version}</artifactId>
+      <artifactId>flink-scala-shell_${scala.binary.version}</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-actor_${flink.scala.binary.version}</artifactId>
+      <artifactId>akka-actor_${scala.binary.version}</artifactId>
       <version>${flink.akka.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-remote_${flink.scala.binary.version}</artifactId>
+      <artifactId>akka-remote_${scala.binary.version}</artifactId>
       <version>${flink.akka.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-slf4j_${flink.scala.binary.version}</artifactId>
+      <artifactId>akka-slf4j_${scala.binary.version}</artifactId>
       <version>${flink.akka.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.typesafe.akka</groupId>
-      <artifactId>akka-testkit_${flink.scala.binary.version}</artifactId>
+      <artifactId>akka-testkit_${scala.binary.version}</artifactId>
       <version>${flink.akka.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
-      <version>${flink.scala.version}</version>
+      <version>${scala.version}</version>
+      <scope>provided</scope>
     </dependency>
 
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-compiler</artifactId>
-      <version>${flink.scala.version}</version>
+      <version>${scala.version}</version>
+      <scope>provided</scope>
     </dependency>
 
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-reflect</artifactId>
-      <version>${flink.scala.version}</version>
+      <version>${scala.version}</version>
+      <scope>provided</scope>
     </dependency>
 
     <dependency>
@@ -169,7 +170,7 @@
       <plugin>
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
-       <version>3.1.4</version>
+       <version>3.2.2</version>
        <executions>
          <!-- Run scala compiler in the process-resources phase, so that 
dependencies on
               scala classes can be resolved later in the (Java) compile phase 
-->
@@ -199,7 +200,7 @@
          <compilerPlugins combine.children="append">
            <compilerPlugin>
              <groupId>org.scalamacros</groupId>
-             <artifactId>paradise_${flink.scala.version}</artifactId>
+             <artifactId>paradise_${scala.version}</artifactId>
              <version>${scala.macros.version}</version>
            </compilerPlugin>
          </compilerPlugins>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
----------------------------------------------------------------------
diff --git 
a/flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java 
b/flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
index 68591d7..d3229cf 100644
--- a/flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
+++ b/flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
@@ -17,6 +17,7 @@
  */
 package org.apache.zeppelin.flink;
 
+import java.lang.reflect.InvocationTargetException;
 import java.io.BufferedReader;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
@@ -24,10 +25,7 @@ import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
+import java.util.*;
 
 import org.apache.flink.api.scala.FlinkILoop;
 import org.apache.flink.configuration.Configuration;
@@ -45,6 +43,8 @@ import org.slf4j.LoggerFactory;
 import scala.Console;
 import scala.None;
 import scala.Some;
+import scala.collection.JavaConversions;
+import scala.collection.immutable.Nil;
 import scala.runtime.AbstractFunction0;
 import scala.tools.nsc.Settings;
 import scala.tools.nsc.interpreter.IMain;
@@ -94,7 +94,7 @@ public class FlinkInterpreter extends Interpreter {
 
     // prepare bindings
     imain.interpret("@transient var _binder = new java.util.HashMap[String, 
Object]()");
-    binder = (Map<String, Object>) getValue("_binder");    
+    Map<String, Object> binder = (Map<String, Object>) getLastObject();
 
     // import libraries
     imain.interpret("import scala.tools.nsc.io._");
@@ -103,7 +103,10 @@ public class FlinkInterpreter extends Interpreter {
     
     imain.interpret("import org.apache.flink.api.scala._");
     imain.interpret("import org.apache.flink.api.common.functions._");
-    imain.bindValue("env", env);
+
+    binder.put("env", env);
+    imain.interpret("val env = _binder.get(\"env\").asInstanceOf["
+        + env.getClass().getName() + "]");
   }
 
   private boolean localMode() {
@@ -192,16 +195,11 @@ public class FlinkInterpreter extends Interpreter {
     return paths;
   }
 
-  public Object getValue(String name) {
-    IMain imain = flinkIloop.intp();
-    Object ret = imain.valueOfTerm(name);
-    if (ret instanceof None) {
-      return null;
-    } else if (ret instanceof Some) {
-      return ((Some) ret).get();
-    } else {
-      return ret;
-    }
+  public Object getLastObject() {
+    Object obj = imain.lastRequest().lineRep().call(
+        "$result",
+        JavaConversions.asScalaBuffer(new LinkedList<Object>()));
+    return obj;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/ignite/pom.xml
----------------------------------------------------------------------
diff --git a/ignite/pom.xml b/ignite/pom.xml
index d1e53cd..ade1320 100644
--- a/ignite/pom.xml
+++ b/ignite/pom.xml
@@ -33,9 +33,7 @@
   <url>http://zeppelin.apache.org</url>
 
   <properties>
-    <ignite.version>1.6.0</ignite.version>
-    <ignite.scala.binary.version>2.10</ignite.scala.binary.version>
-    <ignite.scala.version>2.10.4</ignite.scala.version>
+    <ignite.version>1.5.0.final</ignite.version>
   </properties>
 
   <dependencies>
@@ -73,19 +71,19 @@
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
-      <version>${ignite.scala.version}</version>
+      <version>${scala.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-compiler</artifactId>
-      <version>${ignite.scala.version}</version>
+      <version>${scala.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-reflect</artifactId>
-      <version>${ignite.scala.version}</version>
+      <version>${scala.version}</version>
     </dependency>
 
     <dependency>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
----------------------------------------------------------------------
diff --git 
a/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java 
b/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
index 8368195..d54152a 100644
--- a/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
+++ b/ignite/src/main/java/org/apache/zeppelin/ignite/IgniteInterpreter.java
@@ -44,6 +44,7 @@ import java.util.Properties;
 import scala.Console;
 import scala.None;
 import scala.Some;
+import scala.collection.JavaConversions;
 import scala.tools.nsc.Settings;
 import scala.tools.nsc.interpreter.IMain;
 import scala.tools.nsc.interpreter.Results.Result;
@@ -173,16 +174,11 @@ public class IgniteInterpreter extends Interpreter {
     return paths;
   }
 
-  public Object getValue(String name) {
-    Object val = imain.valueOfTerm(name);
-
-    if (val instanceof None) {
-      return null;
-    } else if (val instanceof Some) {
-      return ((Some) val).get();
-    } else {
-      return val;
-    }
+  public Object getLastObject() {
+    Object obj = imain.lastRequest().lineRep().call(
+        "$result",
+        JavaConversions.asScalaBuffer(new LinkedList<Object>()));
+    return obj;
   }
 
   private Ignite getIgnite() {
@@ -221,7 +217,7 @@ public class IgniteInterpreter extends Interpreter {
 
   private void initIgnite() {
     imain.interpret("@transient var _binder = new java.util.HashMap[String, 
Object]()");
-    Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
+    Map<String, Object> binder = (Map<String, Object>) getLastObject();
 
     if (getIgnite() != null) {
       binder.put("ignite", ignite);

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 62d67fa..85c69e2 100755
--- a/pom.xml
+++ b/pom.xml
@@ -17,7 +17,7 @@
   -->
 
 <project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.maven-v4_0_0.xsd";>
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
 
   <modelVersion>4.0.0</modelVersion>
 
@@ -79,6 +79,11 @@
   </modules>
 
   <properties>
+    <scala.version>2.10.5</scala.version>
+    <scala.binary.version>2.10</scala.binary.version>
+    <scalatest.version>2.2.4</scalatest.version>
+    <scalacheck.version>1.12.5</scalacheck.version>
+
     <slf4j.version>1.7.10</slf4j.version>
     <log4j.version>1.2.17</log4j.version>
     <libthrift.version>0.9.2</libthrift.version>
@@ -93,7 +98,6 @@
   <dependencyManagement>
     <dependencies>
       <!-- Logging -->
-
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -136,7 +140,6 @@
         <version>2.5</version>
       </dependency>
 
-
       <dependency>
         <groupId>com.google.code.gson</groupId>
         <artifactId>gson</artifactId>
@@ -155,14 +158,12 @@
         <version>1.5</version>
       </dependency>
 
-
       <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
         <version>2.4</version>
       </dependency>
 
-
       <dependency>
         <groupId>commons-collections</groupId>
         <artifactId>commons-collections</artifactId>
@@ -181,7 +182,6 @@
         <version>${guava.version}</version>
       </dependency>
 
-
       <dependency>
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
@@ -388,12 +388,25 @@
         </executions>
       </plugin>
 
-      <!--TODO(alex): make part of the build and reconcile conflicts <plugin>
-        <groupId>com.ning.maven.plugins</groupId> 
<artifactId>maven-duplicate-finder-plugin</artifactId>
-        <version>1.0.4</version> <executions> <execution> <id>default</id> 
<phase>verify</phase>
-        <goals> <goal>check</goal> </goals> </execution> </executions> 
<configuration>
-        <failBuildInCaseOfConflict>true</failBuildInCaseOfConflict> 
</configuration>
-        </plugin> -->
+      <!--TODO(alex): make part of the build and reconcile conflicts
+    <plugin> 
+      <groupId>com.ning.maven.plugins</groupId>
+      <artifactId>maven-duplicate-finder-plugin</artifactId>
+      <version>1.0.4</version>
+      <executions>
+        <execution>
+          <id>default</id>
+          <phase>verify</phase>
+          <goals>
+            <goal>check</goal> 
+          </goals> 
+       </execution> 
+      </executions> 
+      <configuration>
+        <failBuildInCaseOfConflict>true</failBuildInCaseOfConflict> 
+      </configuration>
+    </plugin>
+    -->
     </plugins>
 
     <pluginManagement>
@@ -412,7 +425,7 @@
               <exclude>.github/*</exclude>
               <exclude>.gitignore</exclude>
               <exclude>.repository/</exclude>
-             <exclude>.Rhistory</exclude>
+                 <exclude>.Rhistory</exclude>
               <exclude>**/*.diff</exclude>
               <exclude>**/*.patch</exclude>
               <exclude>**/*.avsc</exclude>
@@ -636,6 +649,28 @@
 
   <profiles>
     <profile>
+      <id>scala-2.10</id>
+      <activation>
+        <property><name>!scala-2.11</name></property>
+      </activation>
+      <properties>
+        <scala.version>2.10.5</scala.version>
+        <scala.binary.version>2.10</scala.binary.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>scala-2.11</id>
+      <activation>
+        <property><name>scala-2.11</name></property>
+      </activation>
+      <properties>
+        <scala.version>2.11.7</scala.version>
+        <scala.binary.version>2.11</scala.binary.version>
+      </properties>
+    </profile>
+
+    <profile>
       <id>vendor-repo</id>
       <repositories>
         <repository>
@@ -703,7 +738,6 @@
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
-
       <build>
         <plugins>
           <plugin>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/r/pom.xml
----------------------------------------------------------------------
diff --git a/r/pom.xml b/r/pom.xml
index afa4893..f90192c 100644
--- a/r/pom.xml
+++ b/r/pom.xml
@@ -36,8 +36,6 @@
     <script.extension>.sh</script.extension>
     <path.separator>/</path.separator>
     <spark.version>1.4.1</spark.version>
-    <scala.version>2.10.4</scala.version>
-    <scala.binary.version>2.10</scala.binary.version>
   </properties>
 
   <dependencies>
@@ -118,13 +116,13 @@
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>2.2.4</version>
+      <version>${scalatest.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.scalacheck</groupId>
       <artifactId>scalacheck_${scala.binary.version}</artifactId>
-      <version>1.12.5</version>
+      <version>${scalacheck.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -376,4 +374,31 @@
       </plugin>
     </plugins>
   </build>
+
+
+  <profiles>
+    <profile>
+      <id>scala-2.10</id>
+      <activation>
+        <property><name>!scala-2.11</name></property>
+      </activation>
+      <properties>
+        <spark.version>1.6.1</spark.version>
+        <extra.source.dir>src/main/scala-2.10</extra.source.dir>
+        <extra.testsource.dir>src/test/scala-2.10</extra.testsource.dir>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>scala-2.11</id>
+      <activation>
+        <property><name>scala-2.11</name></property>
+      </activation>
+      <properties>
+        <spark.version>1.6.1</spark.version>
+        <extra.source.dir>src/main/scala-2.11</extra.source.dir>
+        <extra.testsource.dir>src/test/scala/scala-2.11</extra.testsource.dir>
+      </properties>
+    </profile>
+  </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/scalding/pom.xml
----------------------------------------------------------------------
diff --git a/scalding/pom.xml b/scalding/pom.xml
index 7fc4852..78b5f8e 100644
--- a/scalding/pom.xml
+++ b/scalding/pom.xml
@@ -34,7 +34,6 @@
   <url>http://zeppelin.apache.org</url>
 
   <properties>
-    <scala.version>2.11.8</scala.version>
     <hadoop.version>2.6.0</hadoop.version>
     <scalding.version>0.16.1-RC1</scalding.version>
   </properties>
@@ -74,43 +73,43 @@
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-core_2.11</artifactId>
+      <artifactId>scalding-core_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-args_2.11</artifactId>
+      <artifactId>scalding-args_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-date_2.11</artifactId>
+      <artifactId>scalding-date_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-commons_2.11</artifactId>
+      <artifactId>scalding-commons_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-avro_2.11</artifactId>
+      <artifactId>scalding-avro_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-parquet_2.11</artifactId>
+      <artifactId>scalding-parquet_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
     <dependency>
       <groupId>com.twitter</groupId>
-      <artifactId>scalding-repl_2.11</artifactId>
+      <artifactId>scalding-repl_${scala.binary.version}</artifactId>
       <version>${scalding.version}</version>
     </dependency>
 
@@ -199,6 +198,7 @@
       <plugin>
         <groupId>org.scala-tools</groupId>
         <artifactId>maven-scala-plugin</artifactId>
+        <version>2.15.2</version>
         <executions>
           <execution>
             <id>compile</id>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/spark-dependencies/pom.xml b/spark-dependencies/pom.xml
index b20e288..2b2f612 100644
--- a/spark-dependencies/pom.xml
+++ b/spark-dependencies/pom.xml
@@ -37,8 +37,6 @@
 
   <properties>
     <spark.version>1.4.1</spark.version>
-    <scala.version>2.10.4</scala.version>
-    <scala.binary.version>2.10</scala.binary.version>
 
     <hadoop.version>2.3.0</hadoop.version>
     <yarn.version>${hadoop.version}</yarn.version>
@@ -347,6 +345,14 @@
 
   <profiles>
     <profile>
+      <id>scala-2.11</id>
+      <properties>
+        <spark.version>1.6.1</spark.version>
+        
<spark.download.url>http://archive.apache.org/dist/spark/spark-${spark.version}/spark-${spark.version}.tgz</spark.download.url>
+      </properties>
+    </profile>
+
+    <profile>
       <id>spark-1.1</id>
       <dependencies>
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index 86aa9b7..324ebe1 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -39,8 +39,6 @@
     <mockito.version>1.10.19</mockito.version>
     <powermock.version>1.6.4</powermock.version>
     <spark.version>1.4.1</spark.version>
-    <scala.version>2.10.4</scala.version>
-    <scala.binary.version>2.10</scala.binary.version>
   </properties>
 
   <dependencies>
@@ -54,11 +52,11 @@
       <artifactId>slf4j-log4j12</artifactId>
     </dependency>
 
-    <dependency>
+    <!-- dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>zeppelin-display</artifactId>
       <version>${project.version}</version>
-    </dependency>
+    </dependency -->
 
     <dependency>
       <groupId>${project.groupId}</groupId>
@@ -243,7 +241,7 @@
     <dependency>
       <groupId>org.scalatest</groupId>
       <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>2.2.4</version>
+      <version>${scalatest.version}</version>
       <scope>test</scope>
     </dependency>
 
@@ -405,6 +403,7 @@
       <plugin>
         <groupId>org.scala-tools</groupId>
         <artifactId>maven-scala-plugin</artifactId>
+        <version>2.15.2</version>
         <executions>
           <execution>
             <id>compile</id>
@@ -433,7 +432,6 @@
   </build>
 
   <profiles>
-
     <!-- to deactivate 'exclude-sparkr' automatically when 'spark' is 
activated -->
     <profile>
       <id>sparkr</id>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java 
b/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
index 28c5885..5dc5d03 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/DepInterpreter.java
@@ -21,21 +21,22 @@ import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
 import java.io.PrintWriter;
-import java.lang.reflect.Type;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
-import java.util.*;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
 
 import com.google.common.reflect.TypeToken;
 import com.google.gson.Gson;
 import org.apache.spark.repl.SparkILoop;
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.repl.SparkJLineCompletion;
 import org.apache.zeppelin.interpreter.Interpreter;
 import org.apache.zeppelin.interpreter.InterpreterContext;
 import org.apache.zeppelin.interpreter.InterpreterGroup;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterResult.Code;
 import org.apache.zeppelin.interpreter.WrappedInterpreter;
@@ -51,9 +52,12 @@ import scala.Console;
 import scala.None;
 import scala.Some;
 import scala.collection.convert.WrapAsJava$;
+import scala.collection.JavaConversions;
 import scala.tools.nsc.Settings;
 import scala.tools.nsc.interpreter.Completion.Candidates;
 import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
+import scala.tools.nsc.interpreter.IMain;
+import scala.tools.nsc.interpreter.Results;
 import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
 import scala.tools.nsc.settings.MutableSettings.PathSetting;
 
@@ -64,10 +68,17 @@ import scala.tools.nsc.settings.MutableSettings.PathSetting;
  *
  */
 public class DepInterpreter extends Interpreter {
-  private SparkIMain intp;
+  /**
+   * intp - org.apache.spark.repl.SparkIMain (scala 2.10)
+   * intp - scala.tools.nsc.interpreter.IMain; (scala 2.11)
+   */
+  private Object intp;
   private ByteArrayOutputStream out;
   private SparkDependencyContext depc;
-  private SparkJLineCompletion completor;
+  /**
+   * completor - org.apache.spark.repl.SparkJLineCompletion (scala 2.10)
+   */
+  private Object completor;
   private SparkILoop interpreter;
   static final Logger LOGGER = LoggerFactory.getLogger(DepInterpreter.class);
 
@@ -103,7 +114,7 @@ public class DepInterpreter extends Interpreter {
   @Override
   public void close() {
     if (intp != null) {
-      intp.close();
+      Utils.invokeMethod(intp, "close");
     }
   }
 
@@ -149,31 +160,53 @@ public class DepInterpreter extends Interpreter {
     b.v_$eq(true);
     
settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);
 
-    interpreter = new SparkILoop(null, new PrintWriter(out));
+    interpreter = new SparkILoop((java.io.BufferedReader) null, new 
PrintWriter(out));
     interpreter.settings_$eq(settings);
 
     interpreter.createInterpreter();
 
 
-    intp = interpreter.intp();
-    intp.setContextClassLoader();
-    intp.initializeSynchronous();
+    intp = Utils.invokeMethod(interpreter, "intp");
+
+    if (Utils.isScala2_10()) {
+      Utils.invokeMethod(intp, "setContextClassLoader");
+      Utils.invokeMethod(intp, "initializeSynchronous");
+    }
 
     depc = new SparkDependencyContext(getProperty("zeppelin.dep.localrepo"),
                                  
getProperty("zeppelin.dep.additionalRemoteRepository"));
-    completor = new SparkJLineCompletion(intp);
-    intp.interpret("@transient var _binder = new java.util.HashMap[String, 
Object]()");
-    Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
+    if (Utils.isScala2_10()) {
+      completor = Utils.instantiateClass(
+          "org.apache.spark.repl.SparkJLineCompletion",
+          new Class[]{Utils.findClass("org.apache.spark.repl.SparkIMain")},
+          new Object[]{intp});
+    }
+    interpret("@transient var _binder = new java.util.HashMap[String, 
Object]()");
+    Map<String, Object> binder;
+    if (Utils.isScala2_10()) {
+      binder = (Map<String, Object>) getValue("_binder");
+    } else {
+      binder = (Map<String, Object>) getLastObject();
+    }
     binder.put("depc", depc);
 
-    intp.interpret("@transient val z = "
+    interpret("@transient val z = "
         + "_binder.get(\"depc\")"
         + 
".asInstanceOf[org.apache.zeppelin.spark.dep.SparkDependencyContext]");
 
   }
 
+  private Results.Result interpret(String line) {
+    return (Results.Result) Utils.invokeMethod(
+        intp,
+        "interpret",
+        new Class[] {String.class},
+        new Object[] {line});
+  }
+
   public Object getValue(String name) {
-    Object ret = intp.valueOfTerm(name);
+    Object ret = Utils.invokeMethod(
+      intp, "valueOfTerm", new Class[]{String.class}, new Object[]{name});
     if (ret instanceof None) {
       return null;
     } else if (ret instanceof Some) {
@@ -183,6 +216,13 @@ public class DepInterpreter extends Interpreter {
     }
   }
 
+  public Object getLastObject() {
+    IMain.Request r = (IMain.Request) Utils.invokeMethod(intp, "lastRequest");
+    Object obj = r.lineRep().call("$result",
+        JavaConversions.asScalaBuffer(new LinkedList<Object>()));
+    return obj;
+  }
+
   @Override
   public InterpreterResult interpret(String st, InterpreterContext context) {
     PrintStream printStream = new PrintStream(out);
@@ -198,7 +238,7 @@ public class DepInterpreter extends Interpreter {
           "restart Zeppelin/Interpreter" );
     }
 
-    scala.tools.nsc.interpreter.Results.Result ret = intp.interpret(st);
+    scala.tools.nsc.interpreter.Results.Result ret = interpret(st);
     Code code = getResultCode(ret);
 
     try {
@@ -245,17 +285,21 @@ public class DepInterpreter extends Interpreter {
 
   @Override
   public List<InterpreterCompletion> completion(String buf, int cursor) {
-    ScalaCompleter c = completor.completer();
-    Candidates ret = c.complete(buf, cursor);
+    if (Utils.isScala2_10()) {
+      ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completor, 
"completer");
+      Candidates ret = c.complete(buf, cursor);
 
-    List<String> candidates = 
WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
-    List<InterpreterCompletion> completions = new 
LinkedList<InterpreterCompletion>();
+      List<String> candidates = 
WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+      List<InterpreterCompletion> completions = new 
LinkedList<InterpreterCompletion>();
 
-    for (String candidate : candidates) {
-      completions.add(new InterpreterCompletion(candidate, candidate));
-    }
+      for (String candidate : candidates) {
+        completions.add(new InterpreterCompletion(candidate, candidate));
+      }
 
-    return completions;
+      return completions;
+    } else {
+      return new LinkedList<InterpreterCompletion>();
+    }
   }
 
   private List<File> currentClassPath() {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
----------------------------------------------------------------------
diff --git 
a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java 
b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
index df9db43..c827dc2 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/PySparkInterpreter.java
@@ -48,8 +48,6 @@ import org.apache.spark.sql.SQLContext;
 import org.apache.zeppelin.interpreter.Interpreter;
 import org.apache.zeppelin.interpreter.InterpreterContext;
 import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterGroup;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterResult.Code;
 import org.apache.zeppelin.interpreter.LazyOpenInterpreter;

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git 
a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java 
b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 6783378..ba7f1ec 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -19,10 +19,15 @@ package org.apache.zeppelin.spark;
 
 import java.io.File;
 import java.io.PrintWriter;
-import java.lang.reflect.*;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.*;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import com.google.common.base.Joiner;
@@ -33,10 +38,9 @@ import org.apache.spark.HttpServer;
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
 import org.apache.spark.SparkEnv;
-import org.apache.spark.repl.SparkCommandLine;
+
+import org.apache.spark.SecurityManager;
 import org.apache.spark.repl.SparkILoop;
-import org.apache.spark.repl.SparkIMain;
-import org.apache.spark.repl.SparkJLineCompletion;
 import org.apache.spark.scheduler.ActiveJob;
 import org.apache.spark.scheduler.DAGScheduler;
 import org.apache.spark.scheduler.Pool;
@@ -45,7 +49,6 @@ import org.apache.spark.ui.jobs.JobProgressListener;
 import org.apache.zeppelin.interpreter.Interpreter;
 import org.apache.zeppelin.interpreter.InterpreterContext;
 import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterResult.Code;
 import org.apache.zeppelin.interpreter.InterpreterUtils;
@@ -70,9 +73,12 @@ import scala.collection.convert.WrapAsScala;
 import scala.collection.mutable.HashMap;
 import scala.collection.mutable.HashSet;
 import scala.reflect.io.AbstractFile;
+import scala.tools.nsc.Global;
 import scala.tools.nsc.Settings;
 import scala.tools.nsc.interpreter.Completion.Candidates;
 import scala.tools.nsc.interpreter.Completion.ScalaCompleter;
+import scala.tools.nsc.interpreter.IMain;
+import scala.tools.nsc.interpreter.Results;
 import scala.tools.nsc.settings.MutableSettings;
 import scala.tools.nsc.settings.MutableSettings.BooleanSetting;
 import scala.tools.nsc.settings.MutableSettings.PathSetting;
@@ -86,7 +92,11 @@ public class SparkInterpreter extends Interpreter {
 
   private ZeppelinContext z;
   private SparkILoop interpreter;
-  private SparkIMain intp;
+  /**
+   * intp - org.apache.spark.repl.SparkIMain (scala 2.10)
+   * intp - scala.tools.nsc.interpreter.IMain; (scala 2.11)
+   */
+  private Object intp;
   private static SparkContext sc;
   private static SQLContext sqlc;
   private static SparkEnv env;
@@ -97,10 +107,16 @@ public class SparkInterpreter extends Interpreter {
 
   private SparkOutputStream out;
   private SparkDependencyResolver dep;
-  private SparkJLineCompletion completor;
+
+  /**
+   * completor - org.apache.spark.repl.SparkJLineCompletion (scala 2.10)
+   */
+  private Object completor;
 
   private Map<String, Object> binder;
   private SparkVersion sparkVersion;
+  private File outputDir;          // class outputdir for scala 2.11
+  private HttpServer classServer;  // classserver for scala 2.11
 
 
   public SparkInterpreter(Properties property) {
@@ -207,12 +223,15 @@ public class SparkInterpreter extends Interpreter {
     }
   }
 
+
   public SparkDependencyResolver getDependencyResolver() {
     if (dep == null) {
-      dep = new SparkDependencyResolver(intp,
-                                   sc,
-                                   getProperty("zeppelin.dep.localrepo"),
-                                   
getProperty("zeppelin.dep.additionalRemoteRepository"));
+      dep = new SparkDependencyResolver(
+          (Global) Utils.invokeMethod(intp, "global"),
+          (ClassLoader) Utils.invokeMethod(Utils.invokeMethod(intp, 
"classLoader"), "getParent"),
+          sc,
+          getProperty("zeppelin.dep.localrepo"),
+          getProperty("zeppelin.dep.additionalRemoteRepository"));
     }
     return dep;
   }
@@ -233,13 +252,20 @@ public class SparkInterpreter extends Interpreter {
     logger.info("------ Create new SparkContext {} -------", 
getProperty("master"));
 
     String execUri = System.getenv("SPARK_EXECUTOR_URI");
-    String[] jars = SparkILoop.getAddedJars();
+    String[] jars = null;
+
+    if (Utils.isScala2_10()) {
+      jars = (String[]) Utils.invokeStaticMethod(SparkILoop.class, 
"getAddedJars");
+    } else {
+      jars = (String[]) Utils.invokeStaticMethod(
+              findClass("org.apache.spark.repl.Main"), "getAddedJars");
+    }
 
     String classServerUri = null;
 
     try { // in case of spark 1.1x, spark 1.2x
-      Method classServer = 
interpreter.intp().getClass().getMethod("classServer");
-      HttpServer httpServer = (HttpServer) 
classServer.invoke(interpreter.intp());
+      Method classServer = intp.getClass().getMethod("classServer");
+      HttpServer httpServer = (HttpServer) classServer.invoke(intp);
       classServerUri = httpServer.uri();
     } catch (NoSuchMethodException | SecurityException | IllegalAccessException
         | IllegalArgumentException | InvocationTargetException e) {
@@ -248,8 +274,8 @@ public class SparkInterpreter extends Interpreter {
 
     if (classServerUri == null) {
       try { // for spark 1.3x
-        Method classServer = 
interpreter.intp().getClass().getMethod("classServerUri");
-        classServerUri = (String) classServer.invoke(interpreter.intp());
+        Method classServer = intp.getClass().getMethod("classServerUri");
+        classServerUri = (String) classServer.invoke(intp);
       } catch (NoSuchMethodException | SecurityException | 
IllegalAccessException
           | IllegalArgumentException | InvocationTargetException e) {
         // continue instead of: throw new InterpreterException(e);
@@ -259,6 +285,13 @@ public class SparkInterpreter extends Interpreter {
       }
     }
 
+
+    if (Utils.isScala2_11()) {
+      classServer = createHttpServer(outputDir);
+      classServer.start();
+      classServerUri = classServer.uri();
+    }
+
     SparkConf conf =
         new SparkConf()
             .setMaster(getProperty("master"))
@@ -390,17 +423,49 @@ public class SparkInterpreter extends Interpreter {
      * getClass.getClassLoader >> } >> in.setContextClassLoader()
      */
     Settings settings = new Settings();
-    if (getProperty("args") != null) {
-      String[] argsArray = getProperty("args").split(" ");
-      LinkedList<String> argList = new LinkedList<String>();
-      for (String arg : argsArray) {
-        argList.add(arg);
+
+    // process args
+    String args = getProperty("args");
+    if (args == null) {
+      args = "";
+    }
+
+    String[] argsArray = args.split(" ");
+    LinkedList<String> argList = new LinkedList<String>();
+    for (String arg : argsArray) {
+      argList.add(arg);
+    }
+
+    if (Utils.isScala2_10()) {
+      scala.collection.immutable.List<String> list =
+          JavaConversions.asScalaBuffer(argList).toList();
+
+      Object sparkCommandLine = Utils.instantiateClass(
+          "org.apache.spark.repl.SparkCommandLine",
+          new Class[]{ scala.collection.immutable.List.class },
+          new Object[]{ list });
+
+      settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
+    } else {
+      String sparkReplClassDir = getProperty("spark.repl.classdir");
+      if (sparkReplClassDir == null) {
+        sparkReplClassDir = System.getProperty("spark.repl.classdir");
+      }
+      if (sparkReplClassDir == null) {
+        sparkReplClassDir = System.getProperty("java.io.tmpdir");
       }
 
-      SparkCommandLine command =
-          new SparkCommandLine(scala.collection.JavaConversions.asScalaBuffer(
-              argList).toList());
-      settings = command.settings();
+      outputDir = createTempDir(sparkReplClassDir);
+
+      argList.add("-Yrepl-class-based");
+      argList.add("-Yrepl-outdir");
+      argList.add(outputDir.getAbsolutePath());
+
+
+      scala.collection.immutable.List<String> list =
+          JavaConversions.asScalaBuffer(argList).toList();
+
+      settings.processArguments(list, true);
     }
 
     // set classpath for scala compiler
@@ -479,36 +544,41 @@ public class SparkInterpreter extends Interpreter {
     synchronized (sharedInterpreterLock) {
       /* create scala repl */
       if (printREPLOutput()) {
-        this.interpreter = new SparkILoop(null, new PrintWriter(out));
+        this.interpreter = new SparkILoop((java.io.BufferedReader) null, new 
PrintWriter(out));
       } else {
-        this.interpreter = new SparkILoop(null, new PrintWriter(Console.out(), 
false));
+        this.interpreter = new SparkILoop((java.io.BufferedReader) null,
+            new PrintWriter(Console.out(), false));
       }
 
       interpreter.settings_$eq(settings);
 
       interpreter.createInterpreter();
 
-      intp = interpreter.intp();
-      intp.setContextClassLoader();
-      intp.initializeSynchronous();
-
-      if (classOutputDir == null) {
-        classOutputDir = settings.outputDirs().getSingleOutput().get();
-      } else {
-        // change SparkIMain class output dir
-        settings.outputDirs().setSingleOutput(classOutputDir);
-        ClassLoader cl = intp.classLoader();
+      intp = Utils.invokeMethod(interpreter, "intp");
+      Utils.invokeMethod(intp, "setContextClassLoader");
+      Utils.invokeMethod(intp, "initializeSynchronous");
 
-        try {
-          Field rootField = 
cl.getClass().getSuperclass().getDeclaredField("root");
-          rootField.setAccessible(true);
-          rootField.set(cl, classOutputDir);
-        } catch (NoSuchFieldException | IllegalAccessException e) {
-          logger.error(e.getMessage(), e);
+      if (Utils.isScala2_10()) {
+        if (classOutputDir == null) {
+          classOutputDir = settings.outputDirs().getSingleOutput().get();
+        } else {
+          // change SparkIMain class output dir
+          settings.outputDirs().setSingleOutput(classOutputDir);
+          ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, 
"classLoader");
+          try {
+            Field rootField = 
cl.getClass().getSuperclass().getDeclaredField("root");
+            rootField.setAccessible(true);
+            rootField.set(cl, classOutputDir);
+          } catch (NoSuchFieldException | IllegalAccessException e) {
+            logger.error(e.getMessage(), e);
+          }
         }
-      }
 
-      completor = new SparkJLineCompletion(intp);
+        completor = Utils.instantiateClass(
+            "org.apache.spark.repl.SparkJLineCompletion",
+            new Class[]{findClass("org.apache.spark.repl.SparkIMain")},
+            new Object[]{intp});
+      }
 
       sc = getSparkContext();
       if (sc.getPoolForName("fair").isEmpty()) {
@@ -528,29 +598,34 @@ public class SparkInterpreter extends Interpreter {
       z = new ZeppelinContext(sc, sqlc, null, dep,
               Integer.parseInt(getProperty("zeppelin.spark.maxResult")));
 
-      intp.interpret("@transient var _binder = new java.util.HashMap[String, 
Object]()");
-      binder = (Map<String, Object>) getValue("_binder");
+      interpret("@transient val _binder = new java.util.HashMap[String, 
Object]()");
+      Map<String, Object> binder;
+      if (Utils.isScala2_10()) {
+        binder = (Map<String, Object>) getValue("_binder");
+      } else {
+        binder = (Map<String, Object>) getLastObject();
+      }
       binder.put("sc", sc);
       binder.put("sqlc", sqlc);
       binder.put("z", z);
 
-      intp.interpret("@transient val z = "
+      interpret("@transient val z = "
               + 
"_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.ZeppelinContext]");
-      intp.interpret("@transient val sc = "
+      interpret("@transient val sc = "
               + 
"_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
-      intp.interpret("@transient val sqlc = "
+      interpret("@transient val sqlc = "
               + 
"_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
-      intp.interpret("@transient val sqlContext = "
+      interpret("@transient val sqlContext = "
               + 
"_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
-      intp.interpret("import org.apache.spark.SparkContext._");
+      interpret("import org.apache.spark.SparkContext._");
 
       if (importImplicit()) {
         if (sparkVersion.oldSqlContextImplicits()) {
-          intp.interpret("import sqlContext._");
+          interpret("import sqlContext._");
         } else {
-          intp.interpret("import sqlContext.implicits._");
-          intp.interpret("import sqlContext.sql");
-          intp.interpret("import org.apache.spark.sql.functions._");
+          interpret("import sqlContext.implicits._");
+          interpret("import sqlContext.sql");
+          interpret("import org.apache.spark.sql.functions._");
         }
       }
     }
@@ -566,18 +641,20 @@ public class SparkInterpreter extends Interpreter {
             Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
      */
 
-    try {
-      if (sparkVersion.oldLoadFilesMethodName()) {
-        Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", 
Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
-      } else {
-        Method loadFiles = this.interpreter.getClass().getMethod(
-                "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
-        loadFiles.invoke(this.interpreter, settings);
+    if (Utils.isScala2_10()) {
+      try {
+        if (sparkVersion.oldLoadFilesMethodName()) {
+          Method loadFiles = 
this.interpreter.getClass().getMethod("loadFiles", Settings.class);
+          loadFiles.invoke(this.interpreter, settings);
+        } else {
+          Method loadFiles = this.interpreter.getClass().getMethod(
+              "org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
+          loadFiles.invoke(this.interpreter, settings);
+        }
+      } catch (NoSuchMethodException | SecurityException | 
IllegalAccessException
+          | IllegalArgumentException | InvocationTargetException e) {
+        throw new InterpreterException(e);
       }
-    } catch (NoSuchMethodException | SecurityException | IllegalAccessException
-            | IllegalArgumentException | InvocationTargetException e) {
-      throw new InterpreterException(e);
     }
 
     // add jar from DepInterpreter
@@ -621,6 +698,14 @@ public class SparkInterpreter extends Interpreter {
     numReferenceOfSparkContext.incrementAndGet();
   }
 
+  private Results.Result interpret(String line) {
+    return (Results.Result) Utils.invokeMethod(
+        intp,
+        "interpret",
+        new Class[] {String.class},
+        new Object[] {line});
+  }
+
   private List<File> currentClassPath() {
     List<File> paths = 
classPath(Thread.currentThread().getContextClassLoader());
     String[] cps = 
System.getProperty("java.class.path").split(File.pathSeparator);
@@ -660,17 +745,22 @@ public class SparkInterpreter extends Interpreter {
       completionText = "";
       cursor = completionText.length();
     }
-    ScalaCompleter c = completor.completer();
-    Candidates ret = c.complete(completionText, cursor);
+    if (Utils.isScala2_10()) {
+      ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completor, 
"completer");
+      Candidates ret = c.complete(completionText, cursor);
 
-    List<String> candidates = 
WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
-    List<InterpreterCompletion> completions = new 
LinkedList<InterpreterCompletion>();
+      List<String> candidates = 
WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+      List<InterpreterCompletion> completions = new 
LinkedList<InterpreterCompletion>();
 
-    for (String candidate : candidates) {
-      completions.add(new InterpreterCompletion(candidate, candidate));
+      for (String candidate : candidates) {
+        completions.add(new InterpreterCompletion(candidate, candidate));
+      }
+
+      return completions;
+    } else {
+      return new LinkedList<InterpreterCompletion>();
     }
 
-    return completions;
   }
 
   private String getCompletionTargetString(String text, int cursor) {
@@ -714,9 +804,15 @@ public class SparkInterpreter extends Interpreter {
     return resultCompletionText;
   }
 
+  /*
+   * this method doesn't work in scala 2.11
+   * Somehow intp.valueOfTerm returns scala.None always with 
-Yrepl-class-based option
+   */
   public Object getValue(String name) {
-    Object ret = intp.valueOfTerm(name);
-    if (ret instanceof None) {
+    Object ret = Utils.invokeMethod(
+            intp, "valueOfTerm", new Class[]{String.class}, new 
Object[]{name});
+
+    if (ret instanceof None || ret instanceof scala.None$) {
       return null;
     } else if (ret instanceof Some) {
       return ((Some) ret).get();
@@ -725,6 +821,13 @@ public class SparkInterpreter extends Interpreter {
     }
   }
 
+  public Object getLastObject() {
+    IMain.Request r = (IMain.Request) Utils.invokeMethod(intp, "lastRequest");
+    Object obj = r.lineRep().call("$result",
+        JavaConversions.asScalaBuffer(new LinkedList<Object>()));
+    return obj;
+  }
+
   String getJobGroup(InterpreterContext context){
     return "zeppelin-" + context.getParagraphId();
   }
@@ -807,7 +910,7 @@ public class SparkInterpreter extends Interpreter {
 
       scala.tools.nsc.interpreter.Results.Result res = null;
       try {
-        res = intp.interpret(incomplete + s);
+        res = interpret(incomplete + s);
       } catch (Exception e) {
         sc.clearJobGroup();
         out.setInterpreterOutput(null);
@@ -828,6 +931,13 @@ public class SparkInterpreter extends Interpreter {
       }
     }
 
+    // make sure code does not finish with comment
+    if (r == Code.INCOMPLETE) {
+      scala.tools.nsc.interpreter.Results.Result res = null;
+      res = interpret(incomplete + "\nprint(\"\")");
+      r = getResultCode(res);
+    }
+
     if (r == Code.INCOMPLETE) {
       sc.clearJobGroup();
       out.setInterpreterOutput(null);
@@ -839,7 +949,6 @@ public class SparkInterpreter extends Interpreter {
     }
   }
 
-
   @Override
   public void cancel(InterpreterContext context) {
     sc.cancelJobGroup(getJobGroup(context));
@@ -975,9 +1084,13 @@ public class SparkInterpreter extends Interpreter {
     if (numReferenceOfSparkContext.decrementAndGet() == 0) {
       sc.stop();
       sc = null;
+      if (classServer != null) {
+        classServer.stop();
+        classServer = null;
+      }
     }
 
-    intp.close();
+    Utils.invokeMethod(intp, "close");
   }
 
   @Override
@@ -1002,4 +1115,67 @@ public class SparkInterpreter extends Interpreter {
   public SparkVersion getSparkVersion() {
     return sparkVersion;
   }
+
+
+
+  private Class findClass(String name) {
+    try {
+      return Class.forName(name);
+    } catch (ClassNotFoundException e) {
+      logger.error(e.getMessage(), e);
+      return null;
+    }
+  }
+
+  private File createTempDir(String dir) {
+    File file = null;
+
+    // try Utils.createTempDir()
+    file = (File) Utils.invokeStaticMethod(
+      Utils.findClass("org.apache.spark.util.Utils"),
+      "createTempDir",
+      new Class[]{String.class, String.class},
+      new Object[]{dir, "spark"});
+
+    // fallback to old method
+    if (file == null) {
+      file = (File) Utils.invokeStaticMethod(
+        Utils.findClass("org.apache.spark.util.Utils"),
+        "createTempDir",
+        new Class[]{String.class},
+        new Object[]{dir});
+    }
+
+    return file;
+  }
+
+  private HttpServer createHttpServer(File outputDir) {
+    SparkConf conf = new SparkConf();
+    try {
+      // try to create HttpServer
+      Constructor<?> constructor = getClass().getClassLoader()
+          .loadClass(HttpServer.class.getName())
+          .getConstructor(new Class[]{
+            SparkConf.class, File.class, SecurityManager.class, int.class, 
String.class});
+
+      return (HttpServer) constructor.newInstance(new Object[] {
+        conf, outputDir, new SecurityManager(conf), 0, "HTTP Server"});
+    } catch (ClassNotFoundException | NoSuchMethodException | 
IllegalAccessException |
+        InstantiationException | InvocationTargetException e) {
+      // fallback to old constructor
+      Constructor<?> constructor = null;
+      try {
+        constructor = getClass().getClassLoader()
+            .loadClass(HttpServer.class.getName())
+            .getConstructor(new Class[]{
+              File.class, SecurityManager.class, int.class, String.class});
+        return (HttpServer) constructor.newInstance(new Object[] {
+          outputDir, new SecurityManager(conf), 0, "HTTP Server"});
+      } catch (ClassNotFoundException | NoSuchMethodException | 
IllegalAccessException |
+          InstantiationException | InvocationTargetException e1) {
+        logger.error(e1.getMessage(), e1);
+        return null;
+      }
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
----------------------------------------------------------------------
diff --git 
a/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java 
b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
index a3636a2..fc8923c 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
@@ -27,9 +27,7 @@ import org.apache.spark.SparkContext;
 import org.apache.spark.sql.SQLContext;
 import org.apache.zeppelin.interpreter.Interpreter;
 import org.apache.zeppelin.interpreter.InterpreterContext;
-import org.apache.zeppelin.interpreter.InterpreterGroup;
 import org.apache.zeppelin.interpreter.InterpreterException;
-import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
 import org.apache.zeppelin.interpreter.InterpreterResult;
 import org.apache.zeppelin.interpreter.InterpreterResult.Code;
 import org.apache.zeppelin.interpreter.LazyOpenInterpreter;

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkVersion.java 
b/spark/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
index 2fa716b..17f2de7 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkVersion.java
@@ -32,10 +32,12 @@ public class SparkVersion {
   public static final SparkVersion SPARK_1_4_0 = 
SparkVersion.fromVersionString("1.4.0");
   public static final SparkVersion SPARK_1_5_0 = 
SparkVersion.fromVersionString("1.5.0");
   public static final SparkVersion SPARK_1_6_0 = 
SparkVersion.fromVersionString("1.6.0");
-  public static final SparkVersion SPARK_1_7_0 = 
SparkVersion.fromVersionString("1.7.0");
+
+  public static final SparkVersion SPARK_2_0_0 = 
SparkVersion.fromVersionString("2.0.0");
+  public static final SparkVersion SPARK_2_1_0 = 
SparkVersion.fromVersionString("2.1.0");
 
   public static final SparkVersion MIN_SUPPORTED_VERSION =  SPARK_1_0_0;
-  public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_1_7_0;
+  public static final SparkVersion UNSUPPORTED_FUTURE_VERSION = SPARK_2_1_0;
 
   private int version;
   private String versionString;

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java 
b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
new file mode 100644
index 0000000..940e202
--- /dev/null
+++ b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+
+/**
+ * Utility and helper functions for the Spark Interpreter
+ */
+class Utils {
+  public static Logger logger = LoggerFactory.getLogger(Utils.class);
+
+  static Object invokeMethod(Object o, String name) {
+    return invokeMethod(o, name, new Class[]{}, new Object[]{});
+  }
+
+  static Object invokeMethod(Object o, String name, Class[] argTypes, Object[] 
params) {
+    try {
+      return o.getClass().getMethod(name, argTypes).invoke(o, params);
+    } catch (NoSuchMethodException | IllegalAccessException | 
InvocationTargetException e) {
+      logger.error(e.getMessage(), e);
+    }
+    return null;
+  }
+
+  static Object invokeStaticMethod(Class c, String name, Class[] argTypes, 
Object[] params) {
+    try {
+      return c.getMethod(name, argTypes).invoke(null, params);
+    } catch (NoSuchMethodException | InvocationTargetException | 
IllegalAccessException e) {
+      logger.error(e.getMessage(), e);
+    }
+    return null;
+  }
+
+  static Object invokeStaticMethod(Class c, String name) {
+    return invokeStaticMethod(c, name, new Class[]{}, new Object[]{});
+  }
+
+  static Class findClass(String name) {
+    try {
+      return Utils.class.forName(name);
+    } catch (ClassNotFoundException e) {
+      logger.error(e.getMessage(), e);
+      return null;
+    }
+  }
+
+  static Object instantiateClass(String name, Class[] argTypes, Object[] 
params) {
+    try {
+      Constructor<?> constructor = Utils.class.getClassLoader()
+              .loadClass(name).getConstructor(argTypes);
+      return constructor.newInstance(params);
+    } catch (NoSuchMethodException | ClassNotFoundException | 
IllegalAccessException |
+      InstantiationException | InvocationTargetException e) {
+      logger.error(e.getMessage(), e);
+    }
+    return null;
+  }
+
+  // function works after intp is initialized
+  static boolean isScala2_10() {
+    try {
+      Utils.class.forName("org.apache.spark.repl.SparkIMain");
+      return true;
+    } catch (ClassNotFoundException e) {
+      return false;
+    }
+  }
+
+  static boolean isScala2_11() {
+    return !isScala2_10();
+  }
+}

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/main/java/org/apache/zeppelin/spark/dep/SparkDependencyResolver.java
----------------------------------------------------------------------
diff --git 
a/spark/src/main/java/org/apache/zeppelin/spark/dep/SparkDependencyResolver.java
 
b/spark/src/main/java/org/apache/zeppelin/spark/dep/SparkDependencyResolver.java
index e4881d3..c404797 100644
--- 
a/spark/src/main/java/org/apache/zeppelin/spark/dep/SparkDependencyResolver.java
+++ 
b/spark/src/main/java/org/apache/zeppelin/spark/dep/SparkDependencyResolver.java
@@ -29,7 +29,6 @@ import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.spark.SparkContext;
-import org.apache.spark.repl.SparkIMain;
 import org.apache.zeppelin.dep.AbstractDependencyResolver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -60,7 +59,7 @@ import scala.tools.nsc.util.MergedClassPath;
 public class SparkDependencyResolver extends AbstractDependencyResolver {
   Logger logger = LoggerFactory.getLogger(SparkDependencyResolver.class);
   private Global global;
-  private SparkIMain intp;
+  private ClassLoader runtimeClassLoader;
   private SparkContext sc;
 
   private final String[] exclusions = new String[] 
{"org.scala-lang:scala-library",
@@ -71,11 +70,14 @@ public class SparkDependencyResolver extends 
AbstractDependencyResolver {
                                                     
"org.apache.zeppelin:zeppelin-spark",
                                                     
"org.apache.zeppelin:zeppelin-server"};
 
-  public SparkDependencyResolver(SparkIMain intp, SparkContext sc, String 
localRepoPath,
-                            String additionalRemoteRepository) {
+  public SparkDependencyResolver(Global global,
+                                 ClassLoader runtimeClassLoader,
+                                 SparkContext sc,
+                                 String localRepoPath,
+                                 String additionalRemoteRepository) {
     super(localRepoPath);
-    this.intp = intp;
-    this.global = intp.global();
+    this.global = global;
+    this.runtimeClassLoader = runtimeClassLoader;
     this.sc = sc;
     addRepoFromProperty(additionalRemoteRepository);
   }
@@ -127,24 +129,22 @@ public class SparkDependencyResolver extends 
AbstractDependencyResolver {
   private void updateRuntimeClassPath_1_x(URL[] urls) throws SecurityException,
       IllegalAccessException, IllegalArgumentException,
       InvocationTargetException, NoSuchMethodException {
-    ClassLoader cl = intp.classLoader().getParent();
     Method addURL;
-    addURL = cl.getClass().getDeclaredMethod("addURL", new Class[] 
{URL.class});
+    addURL = runtimeClassLoader.getClass().getDeclaredMethod("addURL", new 
Class[] {URL.class});
     addURL.setAccessible(true);
     for (URL url : urls) {
-      addURL.invoke(cl, url);
+      addURL.invoke(runtimeClassLoader, url);
     }
   }
 
   private void updateRuntimeClassPath_2_x(URL[] urls) throws SecurityException,
       IllegalAccessException, IllegalArgumentException,
       InvocationTargetException, NoSuchMethodException {
-    ClassLoader cl = intp.classLoader().getParent();
     Method addURL;
-    addURL = cl.getClass().getDeclaredMethod("addNewUrl", new Class[] 
{URL.class});
+    addURL = runtimeClassLoader.getClass().getDeclaredMethod("addNewUrl", new 
Class[] {URL.class});
     addURL.setAccessible(true);
     for (URL url : urls) {
-      addURL.invoke(cl, url);
+      addURL.invoke(runtimeClassLoader, url);
     }
   }
 
@@ -209,7 +209,7 @@ public class SparkDependencyResolver extends 
AbstractDependencyResolver {
   private void loadFromFs(String artifact, boolean addSparkContext) throws 
Exception {
     File jarFile = new File(artifact);
 
-    intp.global().new Run();
+    global.new Run();
 
     if (sc.version().startsWith("1.1")) {
       updateRuntimeClassPath_1_x(new URL[] {jarFile.toURI().toURL()});
@@ -257,7 +257,7 @@ public class SparkDependencyResolver extends 
AbstractDependencyResolver {
           + artifactResult.getArtifact().getVersion());
     }
 
-    intp.global().new Run();
+    global.new Run();
     if (sc.version().startsWith("1.1")) {
       updateRuntimeClassPath_1_x(newClassPathList.toArray(new URL[0]));
     } else {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index eb8d876..c20b268 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -19,15 +19,15 @@ package org.apache.zeppelin.spark;
 
 import static org.junit.Assert.*;
 
+import java.io.BufferedReader;
 import java.io.File;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.Properties;
 
-import org.apache.spark.HttpServer;
-import org.apache.spark.SecurityManager;
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
+import org.apache.spark.repl.SparkILoop;
 import org.apache.zeppelin.display.AngularObjectRegistry;
 import org.apache.zeppelin.user.AuthenticationInfo;
 import org.apache.zeppelin.display.GUI;
@@ -40,6 +40,7 @@ import org.junit.Test;
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import scala.tools.nsc.interpreter.IMain;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class SparkInterpreterTest {
@@ -138,6 +139,7 @@ public class SparkInterpreterTest {
     assertEquals(InterpreterResult.Code.INCOMPLETE, incomplete.code());
     assertTrue(incomplete.message().length() > 0); // expecting some error
                                                    // message
+
     /*
      * assertEquals(1, repl.getValue("a")); assertEquals(2, 
repl.getValue("b"));
      * repl.interpret("val ver = sc.version");
@@ -181,15 +183,15 @@ public class SparkInterpreterTest {
 
 
     if (getSparkVersionNumber() <= 11) { // spark 1.2 or later does not allow 
create multiple SparkContext in the same jvm by default.
-    // create new interpreter
-    Properties p = new Properties();
-    SparkInterpreter repl2 = new SparkInterpreter(p);
-    repl2.open();
-
-    repl.interpret("case class Man(name:String, age:Int)", context);
-    repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), 
Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
-    assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", context).code());
-    repl2.getSparkContext().stop();
+      // create new interpreter
+      Properties p = new Properties();
+      SparkInterpreter repl2 = new SparkInterpreter(p);
+      repl2.open();
+
+      repl.interpret("case class Man(name:String, age:Int)", context);
+      repl.interpret("val man = sc.parallelize(Seq(Man(\"moon\", 33), 
Man(\"jobs\", 51), Man(\"gates\", 51), Man(\"park\", 34)))", context);
+      assertEquals(Code.SUCCESS, repl.interpret("man.take(3)", 
context).code());
+      repl2.getSparkContext().stop();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/spark/src/test/java/org/apache/zeppelin/spark/dep/SparkDependencyResolverTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/dep/SparkDependencyResolverTest.java
 
b/spark/src/test/java/org/apache/zeppelin/spark/dep/SparkDependencyResolverTest.java
index a0271f4..b226a00 100644
--- 
a/spark/src/test/java/org/apache/zeppelin/spark/dep/SparkDependencyResolverTest.java
+++ 
b/spark/src/test/java/org/apache/zeppelin/spark/dep/SparkDependencyResolverTest.java
@@ -19,7 +19,6 @@ package org.apache.zeppelin.spark.dep;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.zeppelin.spark.dep.SparkDependencyResolver;
 import org.junit.Test;
 
 public class SparkDependencyResolverTest {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/zeppelin-display/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-display/pom.xml b/zeppelin-display/pom.xml
index 39f9f11..5123e7e 100644
--- a/zeppelin-display/pom.xml
+++ b/zeppelin-display/pom.xml
@@ -33,11 +33,6 @@
   <name>Zeppelin: Display system apis</name>
   <url>http://zeppelin.apache.org</url>
 
-  <properties>
-    <scala.version>2.10.4</scala.version>
-    <scala.binary.version>2.10</scala.binary.version>
-  </properties>
-
   <dependencyManagement>
     <dependencies>
       <dependency>
@@ -86,16 +81,34 @@
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
+      <version>${scala.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_2.10</artifactId>
-      <version>2.1.1</version>
+      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <version>${scalatest.version}</version>
       <scope>test</scope>
     </dependency>
   </dependencies>
 
+  <profiles>
+    <profile>
+      <id>scala-2.11</id>
+      <activation>
+        <property><name>scala-2.11</name></property>
+      </activation>
+
+      <dependencies>
+        <dependency>
+          <groupId>org.scala-lang.modules</groupId>
+          <artifactId>scala-xml_${scala.binary.version}</artifactId>
+          <version>1.0.2</version>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+
   <build>
     <plugins>
       <plugin>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/zeppelin-distribution/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-distribution/pom.xml b/zeppelin-distribution/pom.xml
index 17a2514..9b615fd 100644
--- a/zeppelin-distribution/pom.xml
+++ b/zeppelin-distribution/pom.xml
@@ -45,6 +45,34 @@
   <!-- NOTE: These dependency declarations are only required to sort this 
project to the
        end of the line in the multimodule build.
     -->
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-library</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-compiler</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-reflect</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scalap</artifactId>
+        <version>${scala.version}</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
   <dependencies>
     <dependency>
       <artifactId>zeppelin-server</artifactId>
@@ -85,6 +113,23 @@
 
   <profiles>
     <profile>
+      <id>scala-2.11</id>
+      <activation>
+        <property><name>scala-2.11</name></property>
+      </activation>
+
+      <dependencyManagement>
+        <dependencies>
+          <dependency>
+            <groupId>org.scala-lang.modules</groupId>
+            <artifactId>scala-xml_${scala.binary.version}</artifactId>
+            <version>1.0.2</version>
+          </dependency>
+        </dependencies>
+      </dependencyManagement>
+    </profile>
+
+    <profile>
       <id>publish-distr</id>
       <activation>
         <activeByDefault>false</activeByDefault>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/zeppelin-server/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index e3f1927..75ead2f 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -43,19 +43,25 @@
       <dependency>
         <groupId>org.scala-lang</groupId>
         <artifactId>scala-library</artifactId>
-        <version>2.10.4</version>
+        <version>${scala.version}</version>
       </dependency>
 
       <dependency>
         <groupId>org.scala-lang</groupId>
         <artifactId>scala-compiler</artifactId>
-        <version>2.10.4</version>
+        <version>${scala.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-reflect</artifactId>
+        <version>${scala.version}</version>
       </dependency>
 
       <dependency>
         <groupId>org.scala-lang</groupId>
         <artifactId>scalap</artifactId>
-        <version>2.10.4</version>
+        <version>${scala.version}</version>
       </dependency>
     </dependencies>
   </dependencyManagement>
@@ -221,6 +227,19 @@
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
+      <version>${scala.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-compiler</artifactId>
+      <version>${scala.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.scala-lang</groupId>
+      <artifactId>scala-reflect</artifactId>
+      <version>${scala.version}</version>
     </dependency>
 
     <dependency>
@@ -258,8 +277,8 @@
 
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_2.10</artifactId>
-      <version>2.1.1</version>
+      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <version>${scalatest.version}</version>
       <scope>test</scope>
     </dependency>
 
@@ -394,6 +413,23 @@
 
   <profiles>
     <profile>
+      <id>scala-2.11</id>
+      <activation>
+        <property><name>scala-2.11</name></property>
+      </activation>
+
+      <dependencyManagement>
+        <dependencies>
+          <dependency>
+            <groupId>org.scala-lang.modules</groupId>
+            <artifactId>scala-xml_${scala.binary.version}</artifactId>
+            <version>1.0.2</version>
+          </dependency>
+        </dependencies>
+      </dependencyManagement>
+    </profile>
+    
+    <profile>
       <id>using-source-tree</id>
       <activation>
         <activeByDefault>true</activeByDefault>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/f2b46fed/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index 7bedd28..4364349 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -36,6 +36,7 @@ import org.apache.commons.httpclient.methods.GetMethod;
 import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.commons.httpclient.methods.PutMethod;
 import org.apache.commons.httpclient.methods.RequestEntity;
+import org.apache.zeppelin.conf.ZeppelinConfiguration;
 import org.apache.zeppelin.dep.Dependency;
 import org.apache.zeppelin.interpreter.InterpreterGroup;
 import org.apache.zeppelin.interpreter.InterpreterOption;
@@ -99,6 +100,30 @@ public abstract class AbstractTestRestApi {
   protected static void startUp() throws Exception {
     if (!wasRunning) {
       LOG.info("Staring test Zeppelin up...");
+
+
+      // exclude org.apache.zeppelin.rinterpreter.* for scala 2.11 test
+      ZeppelinConfiguration conf = ZeppelinConfiguration.create();
+      String interpreters = 
conf.getString(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETERS);
+      String interpretersCompatibleWithScala211Test = null;
+
+      for (String intp : interpreters.split(",")) {
+        if (intp.startsWith("org.apache.zeppelin.rinterpreter")) {
+          continue;
+        }
+
+        if (interpretersCompatibleWithScala211Test == null) {
+          interpretersCompatibleWithScala211Test = intp;
+        } else {
+          interpretersCompatibleWithScala211Test += "," + intp;
+        }
+      }
+
+      System.setProperty(
+          ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETERS.getVarName(),
+          interpretersCompatibleWithScala211Test);
+
+
       executor = Executors.newSingleThreadExecutor();
       executor.submit(server);
       long s = System.currentTimeMillis();
@@ -238,6 +263,8 @@ public abstract class AbstractTestRestApi {
       }
 
       LOG.info("Test Zeppelin terminated.");
+
+      
System.clearProperty(ZeppelinConfiguration.ConfVars.ZEPPELIN_INTERPRETERS.getVarName());
     }
   }
 

Reply via email to