[ 
https://issues.apache.org/jira/browse/BEAM-3250?focusedWorklogId=88406&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-88406
 ]

ASF GitHub Bot logged work on BEAM-3250:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 06/Apr/18 11:53
            Start Date: 06/Apr/18 11:53
    Worklog Time Spent: 10m 
      Work Description: lukecwik closed pull request #5030: [BEAM-3250] Migrate Spark ValidatesRunner tests to Gradle
URL: https://github.com/apache/beam/pull/5030
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):

diff --git a/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_Spark.groovy b/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_Spark.groovy
index b4a0d029db5..33c0d3f8f4d 100644
--- a/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_Spark.groovy
+++ b/.test-infra/jenkins/job_beam_PostCommit_Java_ValidatesRunner_Spark.groovy
@@ -19,16 +19,22 @@
 import common_job_properties
 
 // This job runs the suite of ValidatesRunner tests against the Spark runner.
-mavenJob('beam_PostCommit_Java_ValidatesRunner_Spark') {
+job('beam_PostCommit_Java_ValidatesRunner_Spark_Gradle') {
   description('Runs the ValidatesRunner suite on the Spark runner.')
-
+  previousNames('beam_PostCommit_Java_ValidatesRunner_Spark')
   previousNames('beam_PostCommit_Java_RunnableOnService_Spark')
 
   // Set common parameters.
   common_job_properties.setTopLevelMainJobProperties(delegate, 'master', 120)
 
-  // Set maven parameters.
-  common_job_properties.setMavenConfig(delegate)
+  def gradle_switches = [
+    // Gradle log verbosity enough to diagnose basic build issues
+    "--info",
+    // Continue the build even if there is a failure to show as many potential failures as possible.
+    '--continue',
+    // Until we verify the build cache is working appropriately, force rerunning all tasks
+    '--rerun-tasks',
+  ]
 
   // Sets that this is a PostCommit job.
   common_job_properties.setPostCommit(delegate)
@@ -39,6 +45,14 @@ mavenJob('beam_PostCommit_Java_ValidatesRunner_Spark') {
     'Apache Spark Runner ValidatesRunner Tests',
     'Run Spark ValidatesRunner')
 
-  // Maven goals for this job.
-  goals('-B -e clean verify -am -pl runners/spark -Plocal-validates-runner-tests -Dspark.ui.enabled=false')
+  // Gradle goals for this job.
+  steps {
+    gradle {
+      rootBuildScriptDir(common_job_properties.checkoutDir)
+      tasks(':runners:spark:validatesRunner')
+      for (String gradle_switch : gradle_switches) {
+        switches(gradle_switch)
+      }
+    }
+  }
 }
diff --git a/build_rules.gradle b/build_rules.gradle
index 77a794ca1c2..aad4fd13fe5 100644
--- a/build_rules.gradle
+++ b/build_rules.gradle
@@ -144,6 +144,7 @@ def apex_core_version = "3.6.0"
 def apex_malhar_version = "3.4.0"
 def postgres_version = "9.4.1212.jre7"
 def jaxb_api_version = "2.2.12"
+def kafka_version = "1.0.0"
 
 // A map of maps containing common libraries used per language. To use:
 // dependencies {
@@ -227,7 +228,8 @@ ext.library = [
     jaxb_api: "javax.xml.bind:jaxb-api:$jaxb_api_version",
     joda_time: "joda-time:joda-time:2.4",
     junit: "junit:junit:4.12",
-    kafka_clients: "org.apache.kafka:kafka-clients:1.0.0",
+    kafka_2_11: "org.apache.kafka:kafka_2.11:$kafka_version",
+    kafka_clients: "org.apache.kafka:kafka-clients:$kafka_version",
     malhar_library: "org.apache.apex:malhar-library:$apex_malhar_version",
     mockito_core: "org.mockito:mockito-core:1.9.5",
     netty_handler: "io.netty:netty-handler:$netty_version",
diff --git a/runners/spark/build.gradle b/runners/spark/build.gradle
index 7eccf4f405e..2a573de6c52 100644
--- a/runners/spark/build.gradle
+++ b/runners/spark/build.gradle
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+import groovy.json.JsonOutput
+
 apply from: project(":").file("build_rules.gradle")
 applyJavaNature(artifactId: "beam-runners-spark")
 
@@ -30,6 +32,11 @@ description = "Apache Beam :: Runners :: Spark"
  */
 evaluationDependsOn(":model:fn-execution")
 evaluationDependsOn(":runners:core-java")
+evaluationDependsOn(":sdks:java:core")
+
+configurations {
+  validatesRunner
+}
 
 test {
   systemProperty "beam.spark.test.reuseSparkContext", "true"
@@ -72,16 +79,21 @@ dependencies {
   provided "org.apache.zookeeper:zookeeper:3.4.6"
   provided "org.scala-lang:scala-library:2.11.8"
   provided "com.esotericsoftware.kryo:kryo:2.21"
-  testCompile project(path: ":sdks:java:io:kafka", configuration: "shadow")
-  testCompile project(path: ":sdks:java:core", configuration: "shadowTest")
+  shadowTest project(path: ":sdks:java:io:kafka", configuration: "shadow")
+  shadowTest project(path: ":sdks:java:core", configuration: "shadowTest")
   // SparkStateInternalsTest extends abstract StateInternalsTest
-  testCompile project(":runners:core-java").sourceSets.test.output
-  testCompile library.java.avro
-  testCompile library.java.kafka_clients
-  testCompile library.java.junit
-  testCompile library.java.mockito_core
-  testCompile library.java.jackson_dataformat_yaml
-  testCompile "org.apache.kafka:kafka_2.11:0.11.0.1"
+  shadowTest project(":runners:core-java").sourceSets.test.output
+  shadowTest library.java.avro
+  shadowTest library.java.kafka_clients
+  shadowTest library.java.junit
+  shadowTest library.java.mockito_core
+  shadowTest library.java.jackson_dataformat_yaml
+  shadowTest library.java.kafka_2_11
+  validatesRunner project(path: ":sdks:java:core", configuration: "shadowTest")
+  validatesRunner project(path: project.path, configuration: "shadowTest")
+  validatesRunner project(path: project.path, configuration: "shadow")
+  validatesRunner project(path: project.path, configuration: "provided")
+  validatesRunner project.sourceSets.test.output
 }
 
 configurations.testRuntimeClasspath {
@@ -89,5 +101,58 @@ configurations.testRuntimeClasspath {
   exclude group: "org.slf4j", module: "slf4j-jdk14"
 }
 
+configurations.validatesRunner {
+  // Testing the Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
+  exclude group: "org.slf4j", module: "slf4j-jdk14"
+}
+
+task validatesRunnerBatch(type: Test) {
+  group = "Verification"
+  def pipelineOptions = JsonOutput.toJson([
+          "--runner=TestSparkRunner",
+          "--streaming=false",
+          "--enableSparkMetricSinks=false",
+  ])
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+  systemProperty "beam.spark.test.reuseSparkContext", "true"
+  systemProperty "spark.ui.enabled", "false"
+  systemProperty "spark.ui.showConsoleProgress", "false"
+
+  classpath = configurations.validatesRunner
+  testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) + files(project.sourceSets.test.output.classesDirs)
+  forkEvery 1
+  useJUnit {
+    includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
+    includeCategories 'org.apache.beam.runners.spark.UsesCheckpointRecovery'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesSplittableParDo'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesAttemptedMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
+    excludeCategories 'org.apache.beam.sdk.testing.UsesCustomWindowMerging'
+  }
+}
+
+task validatesRunnerStreaming(type: Test) {
+  group = "Verification"
+  def pipelineOptions = JsonOutput.toJson([
+          "--runner=TestSparkRunner",
+          "--forceStreaming=true",
+          "--enableSparkMetricSinks=true",
+  ])
+  systemProperty "beamTestPipelineOptions", pipelineOptions
+
+  forkEvery 1
+  useJUnit {
+    includeCategories 'org.apache.beam.runners.spark.StreamingTest'
+  }
+}
+
+task validatesRunner {
+  group = "Verification"
+  description "Validates Spark runner"
+  dependsOn validatesRunnerBatch
+  dependsOn validatesRunnerStreaming
+}
+
 // Generates :runners:spark:runQuickstartJavaSpark
 createJavaQuickstartValidationTask(name: 'Spark')


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 88406)
    Time Spent: 3h 20m  (was: 3h 10m)

> Migrate ValidatesRunner Jenkins PostCommits to Gradle
> -----------------------------------------------------
>
>                 Key: BEAM-3250
>                 URL: https://issues.apache.org/jira/browse/BEAM-3250
>             Project: Beam
>          Issue Type: Sub-task
>          Components: build-system, testing
>            Reporter: Luke Cwik
>            Assignee: Henning Rohde
>            Priority: Major
>          Time Spent: 3h 20m
>  Remaining Estimate: 0h
>
> Update these targets to execute ValidatesRunner tests: 
> https://github.com/apache/beam/search?l=Groovy&q=ValidatesRunner&type=&utf8=%E2%9C%93



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to