This is an automated email from the ASF dual-hosted git repository.
zjffdu pushed a commit to branch branch-0.8
in repository https://gitbox.apache.org/repos/asf/zeppelin.git
The following commit(s) were added to refs/heads/branch-0.8 by this push:
new a006aff [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
a006aff is described below
commit a006affe46c0d4826aa2ddcb1493d743e2a33a8d
Author: Jeff Zhang <[email protected]>
AuthorDate: Wed Apr 24 17:05:42 2019 +0800
[ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
This PR fixes the issue of SPARK-22393 in Zeppelin by using Spark's
`SparkILoop` instead of the stock Scala `ILoop`.
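For context, SPARK-22393 is a Scala REPL bug where a type brought in by an
import cannot be referenced from a class's extends clause. A minimal repro
sketch, mirroring the unit test added in this commit (it fails under the stock
`IMain` with an error like "not found: type ArrayList", and succeeds once
`SparkILoop` wires in Spark's patched interpreter):

    import java.util.ArrayList
    // Fails under plain ILoop/IMain: the import is not visible
    // from the extends clause. Works with SparkILoop.
    class MyArrayList extends ArrayList{}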
[Bug Fix]
* https://jira.apache.org/jira/browse/ZEPPELIN-4132
* A unit test is added
* Do the license files need updating? No
* Are there breaking changes for older versions? No
* Does this need documentation? No
Author: Jeff Zhang <[email protected]>
Closes #3353 from zjffdu/ZEPPELIN-4132 and squashes the following commits:
c94b34af2 [Jeff Zhang] [ZEPPELIN-4132]. Spark Interpreter has issue of SPARK-22393
(cherry picked from commit 1ca7039f02c81fdaa41d185c67810fcf197da3a9)
Signed-off-by: Jeff Zhang <[email protected]>
---
.travis.yml | 2 +-
.../java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java | 7 +++++++
.../scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala | 5 +++--
3 files changed, 11 insertions(+), 3 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index aaf4892..e01a715 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -101,7 +101,7 @@ matrix:
- sudo: required
jdk: "oraclejdk8"
dist: trusty
- env: PYTHON="3" SCALA_VER="2.10" PROFILE="-Pspark-2.2 -Pscala-2.10 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-server,zeppelin-web,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
+ env: PYTHON="3" SCALA_VER="2.11" PROFILE="-Pspark-2.2 -Pscala-2.11 -Phadoop2 -Pintegration" SPARKR="true" BUILD_FLAG="install -DskipTests -DskipRat -am" TEST_FLAG="test -DskipRat -am" MODULES="-pl zeppelin-server,zeppelin-web,spark/interpreter,spark/spark-dependencies" TEST_PROJECTS="-Dtest=ZeppelinSparkClusterTest22,SparkIntegrationTest22,org.apache.zeppelin.spark.* -DfailIfNoTests=false"
# ZeppelinSparkClusterTest21, SparkIntegrationTest21, Unit test of Spark 2.1
- sudo: required
diff --git a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
index a0d1d67..312839a 100644
--- a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
+++ b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
@@ -159,6 +159,13 @@ public class NewSparkInterpreterTest {
"object Counter {\n def apply(x: Long) = new Counter()\n}",
getInterpreterContext());
assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+ // class extend
+ result = interpreter.interpret("import java.util.ArrayList",
getInterpreterContext());
+ assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
+ result = interpreter.interpret("class MyArrayList extends ArrayList{}",
getInterpreterContext());
+ assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+
// spark rdd operation
result = interpreter.interpret("sc.range(1, 10).sum",
getInterpreterContext());
assertEquals(InterpreterResult.Code.SUCCESS, result.code());
diff --git a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
index 0956e04..8465145 100644
--- a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
+++ b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
@@ -22,6 +22,7 @@ import java.net.URLClassLoader
import java.nio.file.{Files, Paths}
import org.apache.spark.SparkConf
+import org.apache.spark.repl.SparkILoop
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion
import org.apache.zeppelin.interpreter.util.InterpreterOutputStream
import org.apache.zeppelin.interpreter.{InterpreterContext, InterpreterResult}
@@ -43,7 +44,7 @@ class SparkScala211Interpreter(override val conf: SparkConf,
lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
- private var sparkILoop: ILoop = _
+ private var sparkILoop: SparkILoop = _
override val interpreterOutput = new InterpreterOutputStream(LOGGER)
@@ -74,7 +75,7 @@ class SparkScala211Interpreter(override val conf: SparkConf,
} else {
new JPrintWriter(Console.out, true)
}
- sparkILoop = new ILoop(None, replOut)
+ sparkILoop = new SparkILoop(None, replOut)
sparkILoop.settings = settings
sparkILoop.createInterpreter()
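For reference, a minimal standalone sketch of the pattern this diff adopts.
It assumes a spark-repl version that contains the SPARK-22393 fix (2.2.1+ /
2.3+), where `SparkILoop.createInterpreter()` installs Spark's patched
interpreter rather than the plain `IMain`; names not present in the diff are
illustrative only:

    import org.apache.spark.repl.SparkILoop
    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.JPrintWriter

    val settings = new Settings()
    settings.usejavacp.value = true           // reuse the JVM's classpath in the REPL

    val replOut = new JPrintWriter(Console.out, true)
    val loop = new SparkILoop(None, replOut)  // None: no interactive input when embedding
    loop.settings = settings
    loop.createInterpreter()                  // builds Spark's interpreter, not plain IMain

    // The SPARK-22393 case now succeeds: an imported type in an extends clause.
    loop.intp.interpret("import java.util.ArrayList")
    loop.intp.interpret("class MyArrayList extends ArrayList{}")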