Github user aarondav commented on a diff in the pull request:
https://github.com/apache/spark/pull/207#discussion_r10865765
--- Diff: project/MimaBuild.scala ---
@@ -0,0 +1,59 @@
+import com.typesafe.tools.mima.plugin.MimaKeys.{binaryIssueFilters,
previousArtifact}
+import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
+import sbt._
+
+object MimaBuild {
+
+ def ignoredABIProblems(base: File) = {
+ import com.typesafe.tools.mima.core._
+ import com.typesafe.tools.mima.core.ProblemFilters._
+
+ // Excludes relevant to all Spark versions
+ val defaultExcludes = Seq(excludePackage("org.apache.spark.repl"))
+
+ // Read package-private excludes from file
+ val excludeFilePath = (base.getAbsolutePath + "/.mima-excludes")
+ val excludeFile = file(excludeFilePath)
+ if (!excludeFile.exists()) {
+ val msg = "Expected excludes file at " + excludeFilePath +
+ " run ./bin/spark-class org.apache.spark.tools.GenerateMIMAIgnore
to generate it."
+ throw new Exception(msg)
+ }
+ val packagePrivateExcludes =
IO.read(excludeFile).split("\n").flatMap(excludeClass).toSeq
+
+ def excludeClass(className: String) = {
+ Seq(
+ excludePackage(className),
+ ProblemFilters.exclude[MissingClassProblem](className),
+ ProblemFilters.exclude[MissingTypesProblem](className),
+ excludePackage(className + "$"),
+ ProblemFilters.exclude[MissingClassProblem](className + "$"),
+ ProblemFilters.exclude[MissingTypesProblem](className + "$")
+ )
+ }
+ def excludeSparkClass(className: String) =
excludeClass("org.apache.spark." + className)
+
+ // Version-specific excludes
+ val versionExcludes =
+ SparkBuild.SPARK_VERSION match {
+ case v if v.startsWith("1.0") =>
--- End diff --
Perhaps we could put a comment on this block. I believe the intention is to
explicitly affirm breaking API changes _to_ the given SPARK_VERSION (rather
than _from_ it). That is, we should ignore any errors about util.XORShiftRandom
from versions prior to Spark 1.0 when it is compared against Spark 1.0, but we
wouldn't ignore errors related to util.XORShiftRandom if we were comparing
Spark 1.0 to future versions. Is my understanding correct?
---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to enable it, or if the feature is enabled but not working,
please contact infrastructure at [email protected] or file a JIRA
ticket with INFRA.
---