Repository: zeppelin
Updated Branches:
  refs/heads/master b6310ada1 -> 01beb54e9


[ZEPPELIN-1300] Implement SparkInterpreter.completion for scala 2.11

### What is this PR for?
SparkInterpreter is missing code completion for Scala 2.11.
This PR makes code completion work with Spark 2.0 and Scala 2.11.
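As a rough, illustrative sketch (not Zeppelin's actual code): the change probes the classpath for an available completer class and falls back from the Scala 2.10 Spark completer to the Scala 2.11 completers, per the commit log. The class names below come from the diff; `findClassSilently` and `selectCompleterClass` are hypothetical helpers standing in for Zeppelin's reflective `Utils.findClass` / `Utils.instantiateClass`.

```java
// Hedged sketch of the completer selection in this change: probe for the
// Scala 2.10 Spark completer first, then fall back to the Scala 2.11 ones.
public class CompleterSelectionSketch {

  /** Returns the class if it is on the classpath, or null without logging. */
  private static Class<?> findClassSilently(String name) {
    try {
      return Class.forName(name);
    } catch (ClassNotFoundException e) {
      return null;
    }
  }

  /** Picks the completer class to instantiate, mirroring the fallback order in the diff. */
  public static Class<?> selectCompleterClass() {
    String[] candidates = {
        "org.apache.spark.repl.SparkJLineCompletion",                 // Scala 2.10
        "scala.tools.nsc.interpreter.PresentationCompilerCompleter",  // Scala 2.11.8
        "scala.tools.nsc.interpreter.JLineCompletion"                 // Scala 2.11.7
    };
    for (String name : candidates) {
      Class<?> clazz = findClassSilently(name);
      if (clazz != null) {
        return clazz;
      }
    }
    return null; // no completer available; completion() then returns an empty list
  }

  public static void main(String[] args) {
    Class<?> completer = selectCompleterClass();
    System.out.println(completer == null ? "no completer found" : completer.getName());
  }
}
```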

### What type of PR is it?
Improvement

### Todos
* [x] - Implement code completion for Scala 2.11
* [x] - Add unit test

### What is the Jira issue?
https://issues.apache.org/jira/browse/ZEPPELIN-1300

### How should this be tested?
A unit test is included.
Try `ctrl + .` in a `%spark` paragraph.
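Programmatically, the included test exercises completion directly. A minimal sketch along the same lines is below; the `CompletionSmokeTest` class name and the pre-initialized `repl` field are assumptions standing in for the setup in `SparkInterpreterTest`.

```java
import static org.junit.Assert.assertTrue;

import java.util.List;
import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
import org.apache.zeppelin.spark.SparkInterpreter;
import org.junit.Test;

public class CompletionSmokeTest {
  // Stand-in for the SparkInterpreter opened in SparkInterpreterTest's setup.
  static SparkInterpreter repl;

  @Test
  public void completesSparkContextMembers() {
    // Ask for completions with the cursor right after "sc." and expect at least one candidate.
    List<InterpreterCompletion> completions = repl.completion("sc.", "sc.".length());
    assertTrue(completions.size() > 0);
  }
}
```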

### Screenshots (if appropriate)
![image](https://cloud.githubusercontent.com/assets/1540981/17455026/703904c2-5b60-11e6-99ed-930cd47691ba.png)

### Questions:
* Do the license files need to be updated? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Lee moon soo <[email protected]>

Closes #1291 from Leemoonsoo/ZEPPELIN-1300 and squashes the following commits:

7edc209 [Lee moon soo] Support scala 2.11.7 as well as 2.11.8
27c572b [Lee moon soo] add log message when completer not found
909b44b [Lee moon soo] find class silently
9ecbe70 [Lee moon soo] Add create completer based on class existence
0fefc6d [Lee moon soo] Implement SparkInterpreter.completion for scala 2.11


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/01beb54e
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/01beb54e
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/01beb54e

Branch: refs/heads/master
Commit: 01beb54e968d27d69cc138b379c79915d884fdf5
Parents: b6310ad
Author: Lee moon soo <[email protected]>
Authored: Sat Aug 6 11:02:48 2016 -0700
Committer: Lee moon soo <[email protected]>
Committed: Sun Aug 7 08:56:25 2016 -0700

----------------------------------------------------------------------
 .../apache/zeppelin/spark/SparkInterpreter.java | 41 +++++++++++++-------
 .../java/org/apache/zeppelin/spark/Utils.java   |  8 +++-
 .../zeppelin/spark/SparkInterpreterTest.java    | 11 ++++--
 3 files changed, 42 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/01beb54e/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index ff0f14c..f3c45ab 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -67,10 +67,8 @@ import scala.Enumeration.Value;
 import scala.collection.Iterator;
 import scala.collection.JavaConversions;
 import scala.collection.JavaConverters;
-import scala.collection.convert.WrapAsJava;
 import scala.collection.Seq;
 import scala.collection.convert.WrapAsJava$;
-import scala.collection.convert.WrapAsScala;
 import scala.collection.mutable.HashMap;
 import scala.collection.mutable.HashSet;
 import scala.reflect.io.AbstractFile;
@@ -114,7 +112,7 @@ public class SparkInterpreter extends Interpreter {
   /**
    * completer - org.apache.spark.repl.SparkJLineCompletion (scala 2.10)
    */
-  private Object completer;
+  private Object completer = null;
 
   private Map<String, Object> binder;
   private SparkVersion sparkVersion;
@@ -720,11 +718,25 @@ public class SparkInterpreter extends Interpreter {
             logger.error(e.getMessage(), e);
           }
         }
+      }
 
+      if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
         completer = Utils.instantiateClass(
             "org.apache.spark.repl.SparkJLineCompletion",
             new Class[]{Utils.findClass("org.apache.spark.repl.SparkIMain")},
             new Object[]{intp});
+      } else if (Utils.findClass(
+          "scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) 
!= null) {
+        completer = Utils.instantiateClass(
+            "scala.tools.nsc.interpreter.PresentationCompilerCompleter",
+            new Class[]{ IMain.class },
+            new Object[]{ intp });
+      } else if (Utils.findClass(
+          "scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
+        completer = Utils.instantiateClass(
+            "scala.tools.nsc.interpreter.JLineCompletion",
+            new Class[]{ IMain.class },
+            new Object[]{ intp });
       }
 
       if (Utils.isSpark2()) {
@@ -903,6 +915,11 @@ public class SparkInterpreter extends Interpreter {
 
   @Override
   public List<InterpreterCompletion> completion(String buf, int cursor) {
+    if (completer == null) {
+      logger.warn("Can't find completer");
+      return new LinkedList<InterpreterCompletion>();
+    }
+
     if (buf.length() < cursor) {
       cursor = buf.length();
     }
@@ -911,22 +928,18 @@ public class SparkInterpreter extends Interpreter {
       completionText = "";
       cursor = completionText.length();
     }
-    if (Utils.isScala2_10()) {
-      ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
-      Candidates ret = c.complete(completionText, cursor);
 
-      List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
-      List<InterpreterCompletion> completions = new LinkedList<InterpreterCompletion>();
+    ScalaCompleter c = (ScalaCompleter) Utils.invokeMethod(completer, "completer");
+    Candidates ret = c.complete(completionText, cursor);
 
-      for (String candidate : candidates) {
-        completions.add(new InterpreterCompletion(candidate, candidate));
-      }
+    List<String> candidates = WrapAsJava$.MODULE$.seqAsJavaList(ret.candidates());
+    List<InterpreterCompletion> completions = new LinkedList<InterpreterCompletion>();
 
-      return completions;
-    } else {
-      return new LinkedList<InterpreterCompletion>();
+    for (String candidate : candidates) {
+      completions.add(new InterpreterCompletion(candidate, candidate));
     }
 
+    return completions;
   }
 
   private String getCompletionTargetString(String text, int cursor) {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/01beb54e/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
----------------------------------------------------------------------
diff --git a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
index 328fa19..765791e 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/Utils.java
@@ -56,10 +56,16 @@ class Utils {
   }
 
   static Class findClass(String name) {
+    return findClass(name, false);
+  }
+
+  static Class findClass(String name, boolean silence) {
     try {
       return Utils.class.forName(name);
     } catch (ClassNotFoundException e) {
-      logger.error(e.getMessage(), e);
+      if (!silence) {
+        logger.error(e.getMessage(), e);
+      }
       return null;
     }
   }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/01beb54e/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index badc4e2..1c7979f 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -19,16 +19,16 @@ package org.apache.zeppelin.spark;
 
 import static org.junit.Assert.*;
 
-import java.io.BufferedReader;
 import java.io.File;
 import java.util.HashMap;
 import java.util.LinkedList;
+import java.util.List;
 import java.util.Properties;
 
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkContext;
-import org.apache.spark.repl.SparkILoop;
 import org.apache.zeppelin.display.AngularObjectRegistry;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
 import org.apache.zeppelin.resource.LocalResourcePool;
 import org.apache.zeppelin.resource.WellKnownResourceName;
 import org.apache.zeppelin.user.AuthenticationInfo;
@@ -42,7 +42,6 @@ import org.junit.Test;
 import org.junit.runners.MethodSorters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.tools.nsc.interpreter.IMain;
 
 @FixMethodOrder(MethodSorters.NAME_ASCENDING)
 public class SparkInterpreterTest {
@@ -282,4 +281,10 @@ public class SparkInterpreterTest {
     assertEquals(Code.ERROR, repl2.interpret(ddl, context).code());
     repl2.close();
   }
+
+  @Test
+  public void testCompletion() {
+    List<InterpreterCompletion> completions = repl.completion("sc.", "sc.".length());
+    assertTrue(completions.size() > 0);
+  }
 }
