Repository: spark
Updated Branches:
  refs/heads/master 39d3d6cc9 -> 546683c21


[SPARK-25298][BUILD] Improve build definition for Scala 2.12

## What changes were proposed in this pull request?

Improve the build for Scala 2.12. The current sbt build fails on the `repl` subproject:

```
[info] Compiling 6 Scala sources to /Users/rendong/wdi/spark/repl/target/scala-2.12/classes...
[error] /Users/rendong/wdi/spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala:80: overriding lazy value importableSymbolsWithRenames in class ImportHandler of type List[(this.intp.global.Symbol, this.intp.global.Name)];
[error]  lazy value importableSymbolsWithRenames needs `override' modifier
[error]       lazy val importableSymbolsWithRenames: List[(Symbol, Name)] = {
[error]                ^
[warn] /Users/rendong/wdi/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:53: variable addedClasspath in class ILoop is deprecated (since 2.11.0): use reset, replay or require to update class path
[warn]       if (addedClasspath != "") {
[warn]           ^
[warn] /Users/rendong/wdi/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:54: variable addedClasspath in class ILoop is deprecated (since 2.11.0): use reset, replay or require to update class path
[warn]         settings.classpath append addedClasspath
[warn]                                   ^
[warn] two warnings found
[error] one error found
[error] (repl/compile:compileIncremental) Compilation failed
[error] Total time: 93 s, completed 2018-9-3 10:07:26
```
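For context: the compiler message above is ordinary Scala behavior, since overriding a concrete member requires the `override` modifier, and in the Scala 2.12 REPL internals `ImportHandler` already defines `importableSymbolsWithRenames`, so the 2.11-era source no longer compiles against 2.12. A minimal sketch of the same class of error (hypothetical `Parent`/`Child` names, not part of this patch):

```
class Parent {
  lazy val xs: List[Int] = Nil
}

class Child extends Parent {
  // Omitting `override` here fails with:
  //   "lazy value xs needs `override' modifier"
  override lazy val xs: List[Int] = List(1)
}
```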

## How was this patch tested?

```
./dev/change-scala-version.sh 2.12

##  For Maven
./build/mvn -Pscala-2.12 [mvn commands]
##  For SBT
sbt -Dscala.version=2.12.6
```

Closes #22310 from sadhen/SPARK-25298.

Authored-by: Darcy Shen <sad...@zoho.com>
Signed-off-by: Sean Owen <sean.o...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/546683c2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/546683c2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/546683c2

Branch: refs/heads/master
Commit: 546683c21a23cd5e3827e69609ca91cf92bd9e02
Parents: 39d3d6c
Author: Darcy Shen <sad...@zoho.com>
Authored: Mon Sep 3 07:36:04 2018 -0500
Committer: Sean Owen <sean.o...@databricks.com>
Committed: Mon Sep 3 07:36:04 2018 -0500

----------------------------------------------------------------------
 docs/building-spark.md                          |  16 ++
 project/SparkBuild.scala                        |   6 +
 repl/pom.xml                                    |  14 +-
 .../org/apache/spark/repl/SparkExprTyper.scala  |  74 ------
 .../spark/repl/SparkILoopInterpreter.scala      | 239 -------------------
 .../org/apache/spark/repl/SparkExprTyper.scala  |  74 ++++++
 .../spark/repl/SparkILoopInterpreter.scala      | 239 +++++++++++++++++++
 7 files changed, 337 insertions(+), 325 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/docs/building-spark.md
----------------------------------------------------------------------
diff --git a/docs/building-spark.md b/docs/building-spark.md
index 0086aea..1d3e0b1 100644
--- a/docs/building-spark.md
+++ b/docs/building-spark.md
@@ -256,3 +256,19 @@ On Linux, this can be done by `sudo service docker start`.
 or
 
     ./build/sbt docker-integration-tests/test
+
+## Change Scala Version
+
+To build Spark using another supported Scala version, please change the major Scala version using (e.g. 2.12):
+
+    ./dev/change-scala-version.sh 2.12
+
+For Maven, please enable the profile (e.g. 2.12):
+
+    ./build/mvn -Pscala-2.12 compile
+
+For SBT, specify a complete scala version using (e.g. 2.12.6):
+
+    ./build/sbt -Dscala.version=2.12.6
+
+Otherwise, the sbt-pom-reader plugin will use the `scala.version` specified in the spark-parent pom.

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 1f45a06..a5ed908 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -94,6 +94,12 @@ object SparkBuild extends PomBuild {
       case Some(v) =>
        v.split("(\\s+|,)").filterNot(_.isEmpty).map(_.trim.replaceAll("-P", "")).toSeq
     }
+
+    Option(System.getProperty("scala.version"))
+      .filter(_.startsWith("2.12"))
+      .foreach { versionString =>
+        System.setProperty("scala-2.12", "true")
+      }
     if (System.getProperty("scala-2.12") == "") {
      // To activate scala-2.10 profile, replace empty property value to non-empty value
      // in the same way as Maven which handles -Dname as -Dname=true before executes build process.
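The `SparkBuild.scala` hunk above is the heart of the sbt fix: passing `-Dscala.version=2.12.x` now implies the Maven-style `-Dscala-2.12` profile flag. A standalone sketch of that activation logic, under the same property names (the `ProfileActivation` wrapper is hypothetical, for illustration only):

```
object ProfileActivation {
  def main(args: Array[String]): Unit = {
    // Mirror Maven profile activation: -Dscala.version=2.12.x implies -Dscala-2.12.
    Option(System.getProperty("scala.version"))
      .filter(_.startsWith("2.12"))
      .foreach(_ => System.setProperty("scala-2.12", "true"))

    println(s"scala-2.12 profile active: ${sys.props.contains("scala-2.12")}")
  }
}
```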

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/repl/pom.xml
----------------------------------------------------------------------
diff --git a/repl/pom.xml b/repl/pom.xml
index 861bbd7..e8464a6 100644
--- a/repl/pom.xml
+++ b/repl/pom.xml
@@ -32,8 +32,8 @@
 
   <properties>
     <sbt.project.name>repl</sbt.project.name>
-    <extra.source.dir>scala-2.11/src/main/scala</extra.source.dir>
-    <extra.testsource.dir>scala-2.11/src/test/scala</extra.testsource.dir>
+    <extra.source.dir>src/main/scala-${scala.binary.version}</extra.source.dir>
+    <extra.testsource.dir>src/test/scala-${scala.binary.version}</extra.testsource.dir>
   </properties>
 
   <dependencies>
@@ -167,14 +167,4 @@
     </plugins>
   </build>
 
-  <profiles>
-    <profile>
-      <id>scala-2.12</id>
-      <properties>
-        <extra.source.dir>scala-2.12/src/main/scala</extra.source.dir>
-        <extra.testsource.dir>scala-2.12/src/test/scala</extra.testsource.dir>
-      </properties>
-    </profile>
-  </profiles>
-
 </project>
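The pom change above retires the per-version Maven profile: version-specific sources now live under `src/main/scala-${scala.binary.version}`, a layout sbt already understands. A hedged sbt sketch of the equivalent setting (sbt's defaults generally include such a directory already; shown only to make the convention explicit):

```
// build.sbt fragment: pick up src/main/scala-2.11 or src/main/scala-2.12
// depending on the Scala version in use.
unmanagedSourceDirectories in Compile +=
  baseDirectory.value / "src" / "main" / s"scala-${scalaBinaryVersion.value}"
```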

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala
deleted file mode 100644
index 724ce9a..0000000
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkExprTyper.scala
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.repl
-
-import scala.tools.nsc.interpreter.{ExprTyper, IR}
-
-trait SparkExprTyper extends ExprTyper {
-
-  import repl._
-  import global.{reporter => _, Import => _, _}
-  import naming.freshInternalVarName
-
-  def doInterpret(code: String): IR.Result = {
-    // interpret/interpretSynthetic may change the phase,
-    // which would have unintended effects on types.
-    val savedPhase = phase
-    try interpretSynthetic(code) finally phase = savedPhase
-  }
-
-  override def symbolOfLine(code: String): Symbol = {
-    def asExpr(): Symbol = {
-      val name = freshInternalVarName()
-      // Typing it with a lazy val would give us the right type, but runs
-      // into compiler bugs with things like existentials, so we compile it
-      // behind a def and strip the NullaryMethodType which wraps the expr.
-      val line = "def " + name + " = " + code
-
-      doInterpret(line) match {
-        case IR.Success =>
-          val sym0 = symbolOfTerm(name)
-          // drop NullaryMethodType
-          sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
-        case _ => NoSymbol
-      }
-    }
-
-    def asDefn(): Symbol = {
-      val old = repl.definedSymbolList.toSet
-
-      doInterpret(code) match {
-        case IR.Success =>
-          repl.definedSymbolList filterNot old match {
-            case Nil => NoSymbol
-            case sym :: Nil => sym
-            case syms => NoSymbol.newOverloaded(NoPrefix, syms)
-          }
-        case _ => NoSymbol
-      }
-    }
-
-    def asError(): Symbol = {
-      doInterpret(code)
-      NoSymbol
-    }
-
-    beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
deleted file mode 100644
index e736607..0000000
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoopInterpreter.scala
+++ /dev/null
@@ -1,239 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.repl
-
-import scala.collection.mutable
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter._
-
-class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain(settings, out) {
-  self =>
-
-  override lazy val memberHandlers = new {
-    val intp: self.type = self
-  } with MemberHandlers {
-    import intp.global._
-
-    override def chooseHandler(member: intp.global.Tree): MemberHandler = member match {
-      case member: Import => new SparkImportHandler(member)
-      case _ => super.chooseHandler(member)
-    }
-
-    class SparkImportHandler(imp: Import) extends ImportHandler(imp: Import) {
-
-      override def targetType: Type = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
-        case NoSymbol => intp.typeOfExpression("" + expr)
-        case sym => sym.tpe
-      }
-
-      private def safeIndexOf(name: Name, s: String): Int = fixIndexOf(name, pos(name, s))
-      private def fixIndexOf(name: Name, idx: Int): Int = if (idx == name.length) -1 else idx
-      private def pos(name: Name, s: String): Int = {
-        var i = name.pos(s.charAt(0), 0)
-        val sLen = s.length()
-        if (sLen == 1) return i
-        while (i + sLen <= name.length) {
-          var j = 1
-          while (s.charAt(j) == name.charAt(i + j)) {
-            j += 1
-            if (j == sLen) return i
-          }
-          i = name.pos(s.charAt(0), i + 1)
-        }
-        name.length
-      }
-
-      private def isFlattenedSymbol(sym: Symbol): Boolean =
-        sym.owner.isPackageClass &&
-          sym.name.containsName(nme.NAME_JOIN_STRING) &&
-          sym.owner.info.member(sym.name.take(
-            safeIndexOf(sym.name, nme.NAME_JOIN_STRING))) != NoSymbol
-
-      private def importableTargetMembers =
-        importableMembers(exitingTyper(targetType)).filterNot(isFlattenedSymbol).toList
-
-      def isIndividualImport(s: ImportSelector): Boolean =
-        s.name != nme.WILDCARD && s.rename != nme.WILDCARD
-      def isWildcardImport(s: ImportSelector): Boolean =
-        s.name == nme.WILDCARD
-
-      // non-wildcard imports
-      private def individualSelectors = selectors filter isIndividualImport
-
-      override val importsWildcard: Boolean = selectors exists isWildcardImport
-
-      lazy val importableSymbolsWithRenames: List[(Symbol, Name)] = {
-        val selectorRenameMap =
-          individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap
-        importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _))
-      }
-
-      override lazy val individualSymbols: List[Symbol] = importableSymbolsWithRenames map (_._1)
-      override lazy val wildcardSymbols: List[Symbol] =
-        if (importsWildcard) importableTargetMembers else Nil
-
-    }
-
-  }
-
-  object expressionTyper extends {
-    val repl: SparkILoopInterpreter.this.type = self
-  } with SparkExprTyper { }
-
-  override def symbolOfLine(code: String): global.Symbol =
-    expressionTyper.symbolOfLine(code)
-
-  override def typeOfExpression(expr: String, silent: Boolean): global.Type =
-    expressionTyper.typeOfExpression(expr, silent)
-
-
-  import global.Name
-  override def importsCode(wanted: Set[Name], wrapper: Request#Wrapper,
-                           definesClass: Boolean, generousImports: Boolean): ComputedImports = {
-
-    import global._
-    import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
-    import memberHandlers._
-
-    val header, code, trailingBraces, accessPath = new StringBuilder
-    val currentImps = mutable.HashSet[Name]()
-    // only emit predef import header if name not resolved in history, loosely
-    var predefEscapes = false
-
-    /**
-     * Narrow down the list of requests from which imports
-     * should be taken.  Removes requests which cannot contribute
-     * useful imports for the specified set of wanted names.
-     */
-    case class ReqAndHandler(req: Request, handler: MemberHandler)
-
-    def reqsToUse: List[ReqAndHandler] = {
-      /**
-       * Loop through a list of MemberHandlers and select which ones to keep.
-       * 'wanted' is the set of names that need to be imported.
-       */
-      def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
-        // Single symbol imports might be implicits! See bug #1752.  Rather than
-        // try to finesse this, we will mimic all imports for now.
-        def keepHandler(handler: MemberHandler) = handler match {
-          // While defining classes in class based mode - implicits are not needed.
-          case h: ImportHandler if isClassBased && definesClass =>
-            h.importedNames.exists(x => wanted.contains(x))
-          case _: ImportHandler => true
-          case x if generousImports => x.definesImplicit ||
-            (x.definedNames exists (d => wanted.exists(w => d.startsWith(w))))
-          case x => x.definesImplicit ||
-            (x.definedNames exists wanted)
-        }
-
-        reqs match {
-          case Nil =>
-            predefEscapes = wanted contains PredefModule.name ; Nil
-          case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
-          case rh :: rest =>
-            import rh.handler._
-            val augment = rh match {
-              case ReqAndHandler(_, _: ImportHandler) => referencedNames
-              case _ => Nil
-            }
-            val newWanted = wanted ++ augment -- definedNames -- importedNames
-            rh :: select(rest, newWanted)
-        }
-      }
-
-      /** Flatten the handlers out and pair each with the original request */
-      select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
-    }
-
-    // add code for a new object to hold some imports
-    def addWrapper() {
-      import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
-      code append (wrapper.prewrap format iw)
-      trailingBraces append wrapper.postwrap
-      accessPath append s".$iw"
-      currentImps.clear()
-    }
-
-    def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
-
-    def wrapBeforeAndAfter[T](op: => T): T = {
-      addWrapper()
-      try op finally addWrapper()
-    }
-
-    // imports from Predef are relocated to the template header to allow hiding.
-    def checkHeader(h: ImportHandler) = h.referencedNames contains PredefModule.name
-
-    // loop through previous requests, adding imports for each one
-    wrapBeforeAndAfter {
-      // Reusing a single temporary value when import from a line with multiple definitions.
-      val tempValLines = mutable.Set[Int]()
-      for (ReqAndHandler(req, handler) <- reqsToUse) {
-        val objName = req.lineRep.readPathInstance
-        handler match {
-          case h: ImportHandler if checkHeader(h) =>
-            header.clear()
-            header append f"${h.member}%n"
-          // If the user entered an import, then just use it; add an import wrapping
-          // level if the import might conflict with some other import
-          case x: ImportHandler if x.importsWildcard =>
-            wrapBeforeAndAfter(code append (x.member + "\n"))
-          case x: ImportHandler =>
-            maybeWrap(x.importedNames: _*)
-            code append (x.member + "\n")
-            currentImps ++= x.importedNames
-
-          case x if isClassBased =>
-            for (sym <- x.definedSymbols) {
-              maybeWrap(sym.name)
-              x match {
-                case _: ClassHandler =>
-                  code.append(s"import ${objName}${req.accessPath}.`${sym.name}`\n")
-                case _ =>
-                  val valName = s"${req.lineRep.packageName}${req.lineRep.readName}"
-                  if (!tempValLines.contains(req.lineRep.lineId)) {
-                    code.append(s"val $valName: ${objName}.type = $objName\n")
-                    tempValLines += req.lineRep.lineId
-                  }
-                  code.append(s"import ${valName}${req.accessPath}.`${sym.name}`\n")
-              }
-              currentImps += sym.name
-            }
-          // For other requests, import each defined name.
-          // import them explicitly instead of with _, so that
-          // ambiguity errors will not be generated. Also, quote
-          // the name of the variable, so that we don't need to
-          // handle quoting keywords separately.
-          case x =>
-            for (sym <- x.definedSymbols) {
-              maybeWrap(sym.name)
-              code append s"import ${x.path}\n"
-              currentImps += sym.name
-            }
-        }
-      }
-    }
-
-    val computedHeader = if (predefEscapes) header.toString else ""
-    ComputedImports(computedHeader, code.toString, trailingBraces.toString, accessPath.toString)
-  }
-
-  private def allReqAndHandlers =
-    prevRequestList flatMap (req => req.handlers map (req -> _))
-
-}

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/repl/src/main/scala-2.11/org/apache/spark/repl/SparkExprTyper.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala-2.11/org/apache/spark/repl/SparkExprTyper.scala b/repl/src/main/scala-2.11/org/apache/spark/repl/SparkExprTyper.scala
new file mode 100644
index 0000000..724ce9a
--- /dev/null
+++ b/repl/src/main/scala-2.11/org/apache/spark/repl/SparkExprTyper.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import scala.tools.nsc.interpreter.{ExprTyper, IR}
+
+trait SparkExprTyper extends ExprTyper {
+
+  import repl._
+  import global.{reporter => _, Import => _, _}
+  import naming.freshInternalVarName
+
+  def doInterpret(code: String): IR.Result = {
+    // interpret/interpretSynthetic may change the phase,
+    // which would have unintended effects on types.
+    val savedPhase = phase
+    try interpretSynthetic(code) finally phase = savedPhase
+  }
+
+  override def symbolOfLine(code: String): Symbol = {
+    def asExpr(): Symbol = {
+      val name = freshInternalVarName()
+      // Typing it with a lazy val would give us the right type, but runs
+      // into compiler bugs with things like existentials, so we compile it
+      // behind a def and strip the NullaryMethodType which wraps the expr.
+      val line = "def " + name + " = " + code
+
+      doInterpret(line) match {
+        case IR.Success =>
+          val sym0 = symbolOfTerm(name)
+          // drop NullaryMethodType
+          sym0.cloneSymbol setInfo exitingTyper(sym0.tpe_*.finalResultType)
+        case _ => NoSymbol
+      }
+    }
+
+    def asDefn(): Symbol = {
+      val old = repl.definedSymbolList.toSet
+
+      doInterpret(code) match {
+        case IR.Success =>
+          repl.definedSymbolList filterNot old match {
+            case Nil => NoSymbol
+            case sym :: Nil => sym
+            case syms => NoSymbol.newOverloaded(NoPrefix, syms)
+          }
+        case _ => NoSymbol
+      }
+    }
+
+    def asError(): Symbol = {
+      doInterpret(code)
+      NoSymbol
+    }
+
+    beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/546683c2/repl/src/main/scala-2.11/org/apache/spark/repl/SparkILoopInterpreter.scala
----------------------------------------------------------------------
diff --git a/repl/src/main/scala-2.11/org/apache/spark/repl/SparkILoopInterpreter.scala b/repl/src/main/scala-2.11/org/apache/spark/repl/SparkILoopInterpreter.scala
new file mode 100644
index 0000000..e736607
--- /dev/null
+++ b/repl/src/main/scala-2.11/org/apache/spark/repl/SparkILoopInterpreter.scala
@@ -0,0 +1,239 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.repl
+
+import scala.collection.mutable
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter._
+
+class SparkILoopInterpreter(settings: Settings, out: JPrintWriter) extends IMain(settings, out) {
+  self =>
+
+  override lazy val memberHandlers = new {
+    val intp: self.type = self
+  } with MemberHandlers {
+    import intp.global._
+
+    override def chooseHandler(member: intp.global.Tree): MemberHandler = member match {
+      case member: Import => new SparkImportHandler(member)
+      case _ => super.chooseHandler(member)
+    }
+
+    class SparkImportHandler(imp: Import) extends ImportHandler(imp: Import) {
+
+      override def targetType: Type = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
+        case NoSymbol => intp.typeOfExpression("" + expr)
+        case sym => sym.tpe
+      }
+
+      private def safeIndexOf(name: Name, s: String): Int = fixIndexOf(name, pos(name, s))
+      private def fixIndexOf(name: Name, idx: Int): Int = if (idx == name.length) -1 else idx
+      private def pos(name: Name, s: String): Int = {
+        var i = name.pos(s.charAt(0), 0)
+        val sLen = s.length()
+        if (sLen == 1) return i
+        while (i + sLen <= name.length) {
+          var j = 1
+          while (s.charAt(j) == name.charAt(i + j)) {
+            j += 1
+            if (j == sLen) return i
+          }
+          i = name.pos(s.charAt(0), i + 1)
+        }
+        name.length
+      }
+
+      private def isFlattenedSymbol(sym: Symbol): Boolean =
+        sym.owner.isPackageClass &&
+          sym.name.containsName(nme.NAME_JOIN_STRING) &&
+          sym.owner.info.member(sym.name.take(
+            safeIndexOf(sym.name, nme.NAME_JOIN_STRING))) != NoSymbol
+
+      private def importableTargetMembers =
+        importableMembers(exitingTyper(targetType)).filterNot(isFlattenedSymbol).toList
+
+      def isIndividualImport(s: ImportSelector): Boolean =
+        s.name != nme.WILDCARD && s.rename != nme.WILDCARD
+      def isWildcardImport(s: ImportSelector): Boolean =
+        s.name == nme.WILDCARD
+
+      // non-wildcard imports
+      private def individualSelectors = selectors filter isIndividualImport
+
+      override val importsWildcard: Boolean = selectors exists isWildcardImport
+
+      lazy val importableSymbolsWithRenames: List[(Symbol, Name)] = {
+        val selectorRenameMap =
+          individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap
+        importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _))
+      }
+
+      override lazy val individualSymbols: List[Symbol] = importableSymbolsWithRenames map (_._1)
+      override lazy val wildcardSymbols: List[Symbol] =
+        if (importsWildcard) importableTargetMembers else Nil
+
+    }
+
+  }
+
+  object expressionTyper extends {
+    val repl: SparkILoopInterpreter.this.type = self
+  } with SparkExprTyper { }
+
+  override def symbolOfLine(code: String): global.Symbol =
+    expressionTyper.symbolOfLine(code)
+
+  override def typeOfExpression(expr: String, silent: Boolean): global.Type =
+    expressionTyper.typeOfExpression(expr, silent)
+
+
+  import global.Name
+  override def importsCode(wanted: Set[Name], wrapper: Request#Wrapper,
+                           definesClass: Boolean, generousImports: Boolean): ComputedImports = {
+
+    import global._
+    import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
+    import memberHandlers._
+
+    val header, code, trailingBraces, accessPath = new StringBuilder
+    val currentImps = mutable.HashSet[Name]()
+    // only emit predef import header if name not resolved in history, loosely
+    var predefEscapes = false
+
+    /**
+     * Narrow down the list of requests from which imports
+     * should be taken.  Removes requests which cannot contribute
+     * useful imports for the specified set of wanted names.
+     */
+    case class ReqAndHandler(req: Request, handler: MemberHandler)
+
+    def reqsToUse: List[ReqAndHandler] = {
+      /**
+       * Loop through a list of MemberHandlers and select which ones to keep.
+       * 'wanted' is the set of names that need to be imported.
+       */
+      def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
+        // Single symbol imports might be implicits! See bug #1752.  Rather than
+        // try to finesse this, we will mimic all imports for now.
+        def keepHandler(handler: MemberHandler) = handler match {
+          // While defining classes in class based mode - implicits are not needed.
+          case h: ImportHandler if isClassBased && definesClass =>
+            h.importedNames.exists(x => wanted.contains(x))
+          case _: ImportHandler => true
+          case x if generousImports => x.definesImplicit ||
+            (x.definedNames exists (d => wanted.exists(w => d.startsWith(w))))
+          case x => x.definesImplicit ||
+            (x.definedNames exists wanted)
+        }
+
+        reqs match {
+          case Nil =>
+            predefEscapes = wanted contains PredefModule.name ; Nil
+          case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
+          case rh :: rest =>
+            import rh.handler._
+            val augment = rh match {
+              case ReqAndHandler(_, _: ImportHandler) => referencedNames
+              case _ => Nil
+            }
+            val newWanted = wanted ++ augment -- definedNames -- importedNames
+            rh :: select(rest, newWanted)
+        }
+      }
+
+      /** Flatten the handlers out and pair each with the original request */
+      select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
+    }
+
+    // add code for a new object to hold some imports
+    def addWrapper() {
+      import nme.{ INTERPRETER_IMPORT_WRAPPER => iw }
+      code append (wrapper.prewrap format iw)
+      trailingBraces append wrapper.postwrap
+      accessPath append s".$iw"
+      currentImps.clear()
+    }
+
+    def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
+
+    def wrapBeforeAndAfter[T](op: => T): T = {
+      addWrapper()
+      try op finally addWrapper()
+    }
+
+    // imports from Predef are relocated to the template header to allow hiding.
+    def checkHeader(h: ImportHandler) = h.referencedNames contains PredefModule.name
+
+    // loop through previous requests, adding imports for each one
+    wrapBeforeAndAfter {
+      // Reusing a single temporary value when import from a line with multiple definitions.
+      val tempValLines = mutable.Set[Int]()
+      for (ReqAndHandler(req, handler) <- reqsToUse) {
+        val objName = req.lineRep.readPathInstance
+        handler match {
+          case h: ImportHandler if checkHeader(h) =>
+            header.clear()
+            header append f"${h.member}%n"
+          // If the user entered an import, then just use it; add an import wrapping
+          // level if the import might conflict with some other import
+          case x: ImportHandler if x.importsWildcard =>
+            wrapBeforeAndAfter(code append (x.member + "\n"))
+          case x: ImportHandler =>
+            maybeWrap(x.importedNames: _*)
+            code append (x.member + "\n")
+            currentImps ++= x.importedNames
+
+          case x if isClassBased =>
+            for (sym <- x.definedSymbols) {
+              maybeWrap(sym.name)
+              x match {
+                case _: ClassHandler =>
+                  code.append(s"import ${objName}${req.accessPath}.`${sym.name}`\n")
+                case _ =>
+                  val valName = s"${req.lineRep.packageName}${req.lineRep.readName}"
+                  if (!tempValLines.contains(req.lineRep.lineId)) {
+                    code.append(s"val $valName: ${objName}.type = $objName\n")
+                    tempValLines += req.lineRep.lineId
+                  }
+                  code.append(s"import ${valName}${req.accessPath}.`${sym.name}`\n")
+              }
+              currentImps += sym.name
+            }
+          // For other requests, import each defined name.
+          // import them explicitly instead of with _, so that
+          // ambiguity errors will not be generated. Also, quote
+          // the name of the variable, so that we don't need to
+          // handle quoting keywords separately.
+          case x =>
+            for (sym <- x.definedSymbols) {
+              maybeWrap(sym.name)
+              code append s"import ${x.path}\n"
+              currentImps += sym.name
+            }
+        }
+      }
+    }
+
+    val computedHeader = if (predefEscapes) header.toString else ""
+    ComputedImports(computedHeader, code.toString, trailingBraces.toString, accessPath.toString)
+  }
+
+  private def allReqAndHandlers =
+    prevRequestList flatMap (req => req.handlers map (req -> _))
+
+}
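For the Scala 2.11 variant relocated above, a quick smoke test can be run outside the patch; a minimal sketch, assuming scala-compiler on the classpath and the 2.11-era interpreter API (`JPrintWriter`, `IR`):

```
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{IR, JPrintWriter}

object InterpreterSmokeTest {
  def main(args: Array[String]): Unit = {
    val settings = new Settings
    settings.usejavacp.value = true // let the REPL see the JVM classpath

    val out = new JPrintWriter(Console.out, true)
    val intp = new org.apache.spark.repl.SparkILoopInterpreter(settings, out)

    // IR.Success is expected for a well-typed line.
    val result: IR.Result = intp.interpret("val x = 1 + 1")
    println(s"interpretation result: $result")
    intp.close()
  }
}
```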

