Repository: spark
Updated Branches:
  refs/heads/master 4e7a29efd -> 7f3c6ff4f


[SPARK-21903][BUILD] Upgrade scalastyle to 1.0.0.

## What changes were proposed in this pull request?

1.0.0 fixes issues with import ordering, explicit types for public methods, the line
length limitation, and comment validation:

```
[error] 
.../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala:50:16:
 Are you sure you want to println? If yes, wrap the code block with
[error]       // scalastyle:off println
[error]       println(...)
[error]       // scalastyle:on println
[error] 
.../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:49:
 File line length exceeds 100 characters
[error] 
.../spark/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala:22:21:
 Are you sure you want to println? If yes, wrap the code block with
[error]       // scalastyle:off println
[error]       println(...)
[error]       // scalastyle:on println
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:35:6:
 Public method must have explicit type
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:51:6:
 Public method must have explicit type
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:93:15:
 Public method must have explicit type
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:98:15:
 Public method must have explicit type
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:47:2:
 Insert a space after the start of the comment
[error] 
.../spark/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala:26:43:
 JavaDStream should come before JavaDStreamLike.
```

This PR also replaces the workaround added in SPARK-16877 with the
`org.scalastyle.scalariform.OverrideJavaChecker` feature, available since 0.9.0.

## How was this patch tested?

Manually tested.

Author: hyukjinkwon <gurwls...@gmail.com>

Closes #19116 from HyukjinKwon/scalastyle-1.0.0.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7f3c6ff4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7f3c6ff4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7f3c6ff4

Branch: refs/heads/master
Commit: 7f3c6ff4ff0a501cc7f1fb53a90ea7b5787f68e1
Parents: 4e7a29e
Author: hyukjinkwon <gurwls...@gmail.com>
Authored: Tue Sep 5 19:40:05 2017 +0900
Committer: hyukjinkwon <gurwls...@gmail.com>
Committed: Tue Sep 5 19:40:05 2017 +0900

----------------------------------------------------------------------
 project/SparkBuild.scala                                |  5 +++--
 project/plugins.sbt                                     |  3 +--
 .../src/main/scala/org/apache/spark/repl/Main.scala     |  2 ++
 .../main/scala/org/apache/spark/repl/SparkILoop.scala   |  5 ++++-
 scalastyle-config.xml                                   |  5 +----
 .../java/org/apache/spark/streaming/JavaTestUtils.scala | 12 ++++++------
 6 files changed, 17 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 9d903ed..20848f0 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -163,14 +163,15 @@ object SparkBuild extends PomBuild {
         val configUrlV = scalastyleConfigUrl.in(config).value
         val streamsV = streams.in(config).value
         val failOnErrorV = true
+        val failOnWarningV = false
         val scalastyleTargetV = scalastyleTarget.in(config).value
         val configRefreshHoursV = scalastyleConfigRefreshHours.in(config).value
         val targetV = target.in(config).value
         val configCacheFileV = scalastyleConfigUrlCacheFile.in(config).value
 
         logger.info(s"Running scalastyle on ${name.value} in ${config.name}")
-        Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, 
scalaSourceV, scalastyleTargetV,
-          streamsV, configRefreshHoursV, targetV, configCacheFileV)
+        Tasks.doScalastyle(args, configV, configUrlV, failOnErrorV, 
failOnWarningV, scalaSourceV,
+          scalastyleTargetV, streamsV, configRefreshHoursV, targetV, 
configCacheFileV)
 
         Set.empty
       }

http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/project/plugins.sbt
----------------------------------------------------------------------
diff --git a/project/plugins.sbt b/project/plugins.sbt
index f67e0a1..3c5442b 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -7,8 +7,7 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % 
"5.1.0")
 // sbt 1.0.0 support: 
https://github.com/jrudolph/sbt-dependency-graph/issues/134
 addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.8.2")
 
-// need to make changes to uptake sbt 1.0 support in "org.scalastyle" %% 
"scalastyle-sbt-plugin" % "0.9.0"
-addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.9.0")
+addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "1.0.0")
 
 addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.17")
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala 
b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 0b16e1b..cc76a70 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -47,7 +47,9 @@ object Main extends Logging {
 
   private def scalaOptionError(msg: String): Unit = {
     hasErrors = true
+    // scalastyle:off println
     Console.err.println(msg)
+    // scalastyle:on println
   }
 
   def main(args: Array[String]) {

http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git 
a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala 
b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index d1d25b7..ea279e4 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -19,7 +19,9 @@ package org.apache.spark.repl
 
 import java.io.BufferedReader
 
+// scalastyle:off println
 import scala.Predef.{println => _, _}
+// scalastyle:on println
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
 import scala.tools.nsc.util.stringFromStream
@@ -46,7 +48,8 @@ class SparkILoop(in0: Option[BufferedReader], out: 
JPrintWriter)
           if (_sc.getConf.getBoolean("spark.ui.reverseProxy", false)) {
             val proxyUrl = _sc.getConf.get("spark.ui.reverseProxyUrl", null)
             if (proxyUrl != null) {
-              println(s"Spark Context Web UI is available at 
${proxyUrl}/proxy/${_sc.applicationId}")
+              println(
+                s"Spark Context Web UI is available at 
${proxyUrl}/proxy/${_sc.applicationId}")
             } else {
               println(s"Spark Context Web UI is available at Spark Master 
Public URL")
             }

http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/scalastyle-config.xml
----------------------------------------------------------------------
diff --git a/scalastyle-config.xml b/scalastyle-config.xml
index 0a4073b..bd7f462 100644
--- a/scalastyle-config.xml
+++ b/scalastyle-config.xml
@@ -268,10 +268,7 @@ This file is divided into 3 sections:
   </check>
 
   <!-- SPARK-16877: Avoid Java annotations -->
-  <check customId="OverrideJavaCase" level="error" 
class="org.scalastyle.scalariform.TokenChecker" enabled="true">
-    <parameters><parameter name="regex">^Override$</parameter></parameters>
-    <customMessage>override modifier should be used instead of 
@java.lang.Override.</customMessage>
-  </check>
+  <check level="error" class="org.scalastyle.scalariform.OverrideJavaChecker" 
enabled="true"></check>
 
   <check level="error" 
class="org.scalastyle.scalariform.DeprecatedJavaChecker" enabled="true"></check>
 

http://git-wip-us.apache.org/repos/asf/spark/blob/7f3c6ff4/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
----------------------------------------------------------------------
diff --git 
a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala 
b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
index ae44fd0..0c4a64c 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaTestUtils.scala
@@ -23,7 +23,7 @@ import scala.collection.JavaConverters._
 import scala.reflect.ClassTag
 
 import org.apache.spark.api.java.JavaRDDLike
-import org.apache.spark.streaming.api.java.{JavaDStreamLike, JavaDStream, 
JavaStreamingContext}
+import org.apache.spark.streaming.api.java.{JavaDStream, JavaDStreamLike, 
JavaStreamingContext}
 
 /** Exposes streaming test functionality in a Java-friendly way. */
 trait JavaTestBase extends TestSuiteBase {
@@ -35,7 +35,7 @@ trait JavaTestBase extends TestSuiteBase {
   def attachTestInputStream[T](
       ssc: JavaStreamingContext,
       data: JList[JList[T]],
-      numPartitions: Int) = {
+      numPartitions: Int): JavaDStream[T] = {
     val seqData = data.asScala.map(_.asScala)
 
     implicit val cm: ClassTag[T] =
@@ -47,9 +47,9 @@ trait JavaTestBase extends TestSuiteBase {
   /**
    * Attach a provided stream to it's associated StreamingContext as a
    * [[org.apache.spark.streaming.TestOutputStream]].
-   **/
+   */
   def attachTestOutputStream[T, This <: JavaDStreamLike[T, This, R], R <: 
JavaRDDLike[T, R]](
-      dstream: JavaDStreamLike[T, This, R]) = {
+      dstream: JavaDStreamLike[T, This, R]): Unit = {
     implicit val cm: ClassTag[T] =
       implicitly[ClassTag[AnyRef]].asInstanceOf[ClassTag[T]]
     val ostream = new TestOutputStreamWithPartitions(dstream.dstream)
@@ -90,10 +90,10 @@ trait JavaTestBase extends TestSuiteBase {
 }
 
 object JavaTestUtils extends JavaTestBase {
-  override def maxWaitTimeMillis = 20000
+  override def maxWaitTimeMillis: Int = 20000
 
 }
 
 object JavaCheckpointTestUtils extends JavaTestBase {
-  override def actuallyWait = true
+  override def actuallyWait: Boolean = true
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to