This is an automated email from the ASF dual-hosted git repository. srowen pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 37ae0a6 [SPARK-33560][TEST-MAVEN][BUILD] Add "unused-import" check to Maven compilation process 37ae0a6 is described below commit 37ae0a608670c660ba4c92b9ebb9cb9fb2bd67e6 Author: yangjie01 <yangji...@baidu.com> AuthorDate: Sat Dec 26 17:40:19 2020 -0600 [SPARK-33560][TEST-MAVEN][BUILD] Add "unused-import" check to Maven compilation process ### What changes were proposed in this pull request? Similar to SPARK-33441, this PR adds an `unused-import` check to the Maven compilation process. After this PR, an unused import will trigger a Maven compilation error. For the Scala 2.13 profile, this PR also leaves a TODO(SPARK-33499), similar to SPARK-33441, because `scala.language.higherKinds` no longer needs to be imported explicitly since Scala 2.13.1. ### Why are the changes needed? Let the Maven build also check for unused imports as a compilation error. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? - Pass the Jenkins or GitHub Action - Local manual test: add an unused import intentionally to trigger a Maven compilation error. Closes #30784 from LuciferYang/SPARK-33560. 
Authored-by: yangjie01 <yangji...@baidu.com> Signed-off-by: Sean Owen <sro...@gmail.com> --- pom.xml | 43 ++++++++++++++++++++++ .../sources/StreamingDataSourceV2Suite.scala | 2 +- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4781f98..609c9fc 100644 --- a/pom.xml +++ b/pom.xml @@ -2508,6 +2508,9 @@ <arg>-feature</arg> <arg>-explaintypes</arg> <arg>-target:jvm-1.8</arg> + <arg>-Xfatal-warnings</arg> + <arg>-Ywarn-unused:imports</arg> + <arg>-P:silencer:globalFilters=.*deprecated.*</arg> </args> <jvmArgs> <jvmArg>-Xms1024m</jvmArg> @@ -2521,6 +2524,13 @@ <javacArg>${java.version}</javacArg> <javacArg>-Xlint:all,-serial,-path,-try</javacArg> </javacArgs> + <compilerPlugins> + <compilerPlugin> + <groupId>com.github.ghik</groupId> + <artifactId>silencer-plugin_${scala.version}</artifactId> + <version>1.6.0</version> + </compilerPlugin> + </compilerPlugins> </configuration> </plugin> <plugin> @@ -3243,6 +3253,39 @@ </dependency> </dependencies> </dependencyManagement> + <build> + <pluginManagement> + <plugins> + <plugin> + <groupId>net.alchim31.maven</groupId> + <artifactId>scala-maven-plugin</artifactId> + <configuration> + <args> + <arg>-unchecked</arg> + <arg>-deprecation</arg> + <arg>-feature</arg> + <arg>-explaintypes</arg> + <arg>-target:jvm-1.8</arg> + <arg>-Wconf:cat=deprecation:wv,any:e</arg> + <!-- + TODO(SPARK-33805): Undo the corresponding deprecated usage suppression rule after fixed + <arg>-Wunused:imports</arg> + --> + <arg>-Wconf:cat=scaladoc:wv</arg> + <arg>-Wconf:cat=lint-multiarg-infix:wv</arg> + <arg>-Wconf:cat=other-nullary-override:wv</arg> + <arg>-Wconf:cat=other-match-analysis&site=org.apache.spark.sql.catalyst.catalog.SessionCatalog.lookupFunction.catalogFunction:wv</arg> + <arg>-Wconf:cat=other-pure-statement&site=org.apache.spark.streaming.util.FileBasedWriteAheadLog.readAll.readFile:wv</arg> + <arg>-Wconf:cat=other-pure-statement&site=org.apache.spark.scheduler.OutputCommitCoordinatorSuite.<local 
OutputCommitCoordinatorSuite>.futureAction:wv</arg> + </args> + <compilerPlugins combine.self="override"> + </compilerPlugins> + </configuration> + </plugin> + </plugins> + </pluginManagement> + </build> + </profile> <!-- diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala index 66544a8..ae0dba7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala @@ -290,7 +290,7 @@ class StreamingDataSourceV2Suite extends StreamTest { Trigger.Continuous(1000)) private def testPositiveCase(readFormat: String, writeFormat: String, trigger: Trigger): Unit = { - testPositiveCaseWithQuery(readFormat, writeFormat, trigger)(() => _) + testPositiveCaseWithQuery(readFormat, writeFormat, trigger)(_ => ()) } private def testPositiveCaseWithQuery( --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org