Github user pwendell commented on a diff in the pull request:
https://github.com/apache/spark/pull/772#discussion_r14538915
--- Diff: project/SparkBuild.scala ---
@@ -15,393 +15,194 @@
* limitations under the License.
*/
+import scala.util.Properties
+import scala.collection.JavaConversions._
+
import sbt._
-import sbt.Classpaths.publishTask
import sbt.Keys._
-import sbtassembly.Plugin._
-import AssemblyKeys._
-import scala.util.Properties
import org.scalastyle.sbt.ScalastylePlugin.{Settings => ScalaStyleSettings}
-import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
-import sbtunidoc.Plugin._
-import UnidocKeys._
+import com.typesafe.sbt.pom.{PomBuild, SbtPomKeys}
+import net.virtualvoid.sbt.graph.Plugin.graphSettings
-import scala.collection.JavaConversions._
+object BuildCommons {
-// For Sonatype publishing
-// import com.jsuereth.pgp.sbtplugin.PgpKeys._
+ private val buildLocation = file(".").getAbsoluteFile.getParentFile
-object SparkBuild extends Build {
- val SPARK_VERSION = "1.1.0-SNAPSHOT"
- val SPARK_VERSION_SHORT = SPARK_VERSION.replaceAll("-SNAPSHOT", "")
+ val allProjects@Seq(bagel, catalyst, core, graphx, hive, mllib, repl, spark, sql, streaming,
+ streamingFlume, streamingKafka, streamingMqtt, streamingTwitter, streamingZeromq) =
+ Seq("bagel", "catalyst", "core", "graphx", "hive", "mllib", "repl", "spark", "sql",
+ "streaming", "streaming-flume", "streaming-kafka", "streaming-mqtt", "streaming-twitter",
+ "streaming-zeromq").map(ProjectRef(buildLocation, _))
- // Hadoop version to build against. For example, "1.0.4" for Apache releases, or
- // "2.0.0-mr1-cdh4.2.0" for Cloudera Hadoop. Note that these variables can be set
- // through the environment variables SPARK_HADOOP_VERSION and SPARK_YARN.
- val DEFAULT_HADOOP_VERSION = "1.0.4"
+ val optionallyEnabledProjects@Seq(yarn, yarnStable, yarnAlpha, java8Tests, sparkGangliaLgpl) =
+ Seq("yarn", "yarn-stable", "yarn-alpha", "java8-tests", "ganglia-lgpl").map(ProjectRef(buildLocation, _))
- // Whether the Hadoop version to build against is 2.2.x, or a variant of it. This can be set
- // through the SPARK_IS_NEW_HADOOP environment variable.
- val DEFAULT_IS_NEW_HADOOP = false
+ val assemblyProjects@Seq(assembly, examples, tools) = Seq("assembly", "examples", "tools")
+ .map(ProjectRef(buildLocation, _))
- val DEFAULT_YARN = false
+ val sparkHome = buildLocation
+}
- val DEFAULT_HIVE = false
+object SparkBuild extends PomBuild {
- // HBase version; set as appropriate.
- val HBASE_VERSION = "0.94.6"
+ import BuildCommons._
+ import scala.collection.mutable.Map
- // Target JVM version
- val SCALAC_JVM_VERSION = "jvm-1.6"
- val JAVAC_JVM_VERSION = "1.6"
+ val projectsMap: Map[String, Seq[Setting[_]]] = Map.empty
- lazy val root = Project("root", file("."), settings = rootSettings) aggregate(allProjects: _*)
+ def backwardCompatibility = {
+ import scala.collection.mutable
+ var profiles: mutable.Seq[String] = mutable.Seq.empty
--- End diff --
Here it might be nice to echo back to the user that you've converted their
strings into a new option:
```
NOTE: Setting SPARK_YARN is deprecated. Use the `-Pyarn` flag.
NOTE: Setting SPARK_HADOOP_VERSION is deprecated. Use `-Dhadoop.version=XX`.
```
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at [email protected] or file a JIRA ticket
with INFRA.
---