[ 
https://issues.apache.org/jira/browse/BEAM-1920?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16294789#comment-16294789
 ] 

ASF GitHub Bot commented on BEAM-1920:
--------------------------------------

iemejia closed pull request #4208: [BEAM-1920] Upgrade Spark runner to Spark 2.x
URL: https://github.com/apache/beam/pull/4208
 
 
   

This is a PR merged from a forked repository. Because GitHub hides the
original diff of a foreign (forked) pull request after merge, the diff is
reproduced below for the sake of provenance:

diff --git a/build.gradle b/build.gradle
index 7b2522770fd..80b46f9c807 100644
--- a/build.gradle
+++ b/build.gradle
@@ -38,7 +38,7 @@ def grpc_google_common_protos = "0.1.9"
 def hamcrest_version = "1.3"
 def hadoop_version = "2.7.3"
 def jackson_version = "2.8.9"
-def spark_version = "1.6.3"
+def spark_version = "2.2.1"
 def pubsub_grpc_version = "0.1.18"
 def apex_core_version = "3.6.0"
 def apex_malhar_version = "3.4.0"
@@ -121,10 +121,10 @@ ext.library = [
     jackson_core: "com.fasterxml.jackson.core:jackson-core:$jackson_version",
     jackson_databind: 
"com.fasterxml.jackson.core:jackson-databind:$jackson_version",
     jackson_dataformat_yaml: 
"com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$jackson_version",
-    jackson_module_scala: 
"com.fasterxml.jackson.module:jackson-module-scala_2.10:$jackson_version",
+    jackson_module_scala: 
"com.fasterxml.jackson.module:jackson-module-scala_2.11:$jackson_version",
     joda_time: "joda-time:joda-time:2.4",
     junit: "junit:junit:4.12",
-    kafka_clients: "org.apache.kafka:kafka-clients:0.10.1.0",
+    kafka_clients: "org.apache.kafka:kafka-clients:0.11.0.1",
     malhar_library: "org.apache.apex:malhar-library:$apex_malhar_version",
     mockito_core: "org.mockito:mockito-core:1.9.5",
     netty_handler: "io.netty:netty-handler:$netty_version",
@@ -139,9 +139,9 @@ ext.library = [
     slf4j_jdk14: "org.slf4j:slf4j-jdk14:1.7.25",
     slf4j_log4j12: "org.slf4j:slf4j-log4j12:1.7.25",
     snappy_java: "org.xerial.snappy:snappy-java:1.1.4",
-    spark_core: "org.apache.spark:spark-core_2.10:$spark_version",
-    spark_network_common: 
"org.apache.spark:spark-network-common_2.10:$spark_version",
-    spark_streaming: "org.apache.spark:spark-streaming_2.10:$spark_version",
+    spark_core: "org.apache.spark:spark-core_2.11:$spark_version",
+    spark_network_common: 
"org.apache.spark:spark-network-common_2.11:$spark_version",
+    spark_streaming: "org.apache.spark:spark-streaming_2.11:$spark_version",
     stax2_api: "org.codehaus.woodstox:stax2-api:3.1.4",
     woodstox_core_asl: "org.codehaus.woodstox:woodstox-core-asl:4.4.1",
   ],
diff --git a/examples/java/build.gradle b/examples/java/build.gradle
index 501c743cd80..a42e4f1dd30 100644
--- a/examples/java/build.gradle
+++ b/examples/java/build.gradle
@@ -79,7 +79,7 @@ dependencies {
   dataflowRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-google-cloud-dataflow-java", configuration: 
"shadow")
   dataflowStreamingRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-google-cloud-dataflow-java", configuration: 
"shadow")
   directRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-direct-java", configuration: "shadow")
-  flinkRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-flink_2.10", configuration: "shadow")
+  flinkRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-flink_2.11", configuration: "shadow")
   sparkRunnerPreCommit project(path: 
":beam-runners-parent:beam-runners-spark", configuration: "shadow")
   sparkRunnerPreCommit project(path: 
":beam-sdks-parent:beam-sdks-java-parent:beam-sdks-java-io-parent:beam-sdks-java-io-hadoop-file-system",
 configuration: "shadow")
   sparkRunnerPreCommit library.java.spark_streaming
diff --git a/examples/java/pom.xml b/examples/java/pom.xml
index e47e9a12e25..ce4527d6096 100644
--- a/examples/java/pom.xml
+++ b/examples/java/pom.xml
@@ -95,7 +95,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-flink_2.10</artifactId>
+          <artifactId>beam-runners-flink_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
@@ -123,12 +123,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-core_2.10</artifactId>
+          <artifactId>spark-core_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
diff --git a/examples/java8/pom.xml b/examples/java8/pom.xml
index 76518456b63..1bee9afc6e9 100644
--- a/examples/java8/pom.xml
+++ b/examples/java8/pom.xml
@@ -95,7 +95,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-flink_2.10</artifactId>
+          <artifactId>beam-runners-flink_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
@@ -123,12 +123,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-core_2.10</artifactId>
+          <artifactId>spark-core_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
diff --git a/pom.xml b/pom.xml
index 24e15e1b963..8abd9159458 100644
--- a/pom.xml
+++ b/pom.xml
@@ -148,7 +148,7 @@
     <pubsub.version>v1-rev10-1.22.0</pubsub.version>
     <slf4j.version>1.7.25</slf4j.version>
     <spanner.version>0.20.0-beta</spanner.version>
-    <spark.version>1.6.3</spark.version>
+    <spark.version>2.2.1</spark.version>
     <spring.version>4.3.5.RELEASE</spring.version>
     <stax2.version>3.1.4</stax2.version>
     <storage.version>v1-rev71-1.22.0</storage.version>
@@ -739,7 +739,7 @@
 
       <dependency>
         <groupId>org.apache.beam</groupId>
-        <artifactId>beam-runners-flink_2.10</artifactId>
+        <artifactId>beam-runners-flink_2.11</artifactId>
         <version>${project.version}</version>
       </dependency>
 
@@ -1229,7 +1229,7 @@
       </dependency>
       <dependency>
         <groupId>com.fasterxml.jackson.module</groupId>
-        <artifactId>jackson-module-scala_2.10</artifactId>
+        <artifactId>jackson-module-scala_2.11</artifactId>
         <version>${jackson.version}</version>
       </dependency>
 
@@ -1320,19 +1320,19 @@
 
       <dependency>
         <groupId>org.apache.spark</groupId>
-        <artifactId>spark-core_2.10</artifactId>
+        <artifactId>spark-core_2.11</artifactId>
         <version>${spark.version}</version>
       </dependency>
 
       <dependency>
         <groupId>org.apache.spark</groupId>
-        <artifactId>spark-streaming_2.10</artifactId>
+        <artifactId>spark-streaming_2.11</artifactId>
         <version>${spark.version}</version>
       </dependency>
 
       <dependency>
         <groupId>org.apache.spark</groupId>
-        <artifactId>spark-network-common_2.10</artifactId>
+        <artifactId>spark-network-common_2.11</artifactId>
         <version>${spark.version}</version>
       </dependency>
 
diff --git 
a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
 
b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
index 02ba8cb16eb..3ffcb21f166 100644
--- 
a/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
+++ 
b/runners/core-java/src/main/java/org/apache/beam/runners/core/metrics/MetricsContainerStepMap.java
@@ -77,6 +77,25 @@ public void update(String step, MetricsContainerImpl 
container) {
     getContainer(step).update(container);
   }
 
+  @Override
+  public boolean equals(Object o) {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+
+    MetricsContainerStepMap that = (MetricsContainerStepMap) o;
+
+    return metricsContainers.equals(that.metricsContainers);
+  }
+
+  @Override
+  public int hashCode() {
+    return metricsContainers.hashCode();
+  }
+
   /**
    * Returns {@link MetricResults} based on given
    * {@link MetricsContainerStepMap MetricsContainerStepMaps} of attempted and 
committed metrics.
diff --git a/runners/flink/build.gradle b/runners/flink/build.gradle
index 3085a2b801f..83427c9a673 100644
--- a/runners/flink/build.gradle
+++ b/runners/flink/build.gradle
@@ -51,12 +51,12 @@ dependencies {
   shadow library.java.slf4j_api
   shadow library.java.joda_time
   shadow library.java.commons_compress
-  shadow "org.apache.flink:flink-clients_2.10:$flink_version"
+  shadow "org.apache.flink:flink-clients_2.11:$flink_version"
   shadow "org.apache.flink:flink-core:$flink_version"
   shadow "org.apache.flink:flink-metrics-core:$flink_version"
   shadow "org.apache.flink:flink-java:$flink_version"
-  shadow "org.apache.flink:flink-runtime_2.10:$flink_version"
-  shadow "org.apache.flink:flink-streaming-java_2.10:$flink_version"
+  shadow "org.apache.flink:flink-runtime_2.11:$flink_version"
+  shadow "org.apache.flink:flink-streaming-java_2.11:$flink_version"
   testCompile project(path: 
":beam-sdks-parent:beam-sdks-java-parent:beam-sdks-java-core", configuration: 
"shadowTest")
   testCompile 
project(":beam-model-parent:beam-model-fn-execution").sourceSets.test.output
   testCompile 
project(":beam-runners-parent:beam-runners-core-java").sourceSets.test.output
@@ -67,9 +67,9 @@ dependencies {
   testCompile library.java.google_api_services_bigquery
   testCompile library.java.jackson_dataformat_yaml
   testCompile "org.apache.flink:flink-core:$flink_version:tests"
-  testCompile "org.apache.flink:flink-runtime_2.10:$flink_version:tests"
-  testCompile "org.apache.flink:flink-streaming-java_2.10:$flink_version:tests"
-  testCompile "org.apache.flink:flink-test-utils_2.10:$flink_version"
+  testCompile "org.apache.flink:flink-runtime_2.11:$flink_version:tests"
+  testCompile "org.apache.flink:flink-streaming-java_2.11:$flink_version:tests"
+  testCompile "org.apache.flink:flink-test-utils_2.11:$flink_version"
 }
 
 task packageTests(type: Jar) {
diff --git a/runners/flink/pom.xml b/runners/flink/pom.xml
index 7840c328c9a..02cf88d6ff3 100644
--- a/runners/flink/pom.xml
+++ b/runners/flink/pom.xml
@@ -26,7 +26,7 @@
     <relativePath>../pom.xml</relativePath>
   </parent>
 
-  <artifactId>beam-runners-flink_2.10</artifactId>
+  <artifactId>beam-runners-flink_2.11</artifactId>
   <name>Apache Beam :: Runners :: Flink</name>
   <packaging>jar</packaging>
 
@@ -165,7 +165,7 @@
     <!-- Flink dependencies -->
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-clients_2.10</artifactId>
+      <artifactId>flink-clients_2.11</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
@@ -189,13 +189,13 @@
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-runtime_2.10</artifactId>
+      <artifactId>flink-runtime_2.11</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-streaming-java_2.10</artifactId>
+      <artifactId>flink-streaming-java_2.11</artifactId>
       <version>${flink.version}</version>
     </dependency>
 
@@ -210,7 +210,7 @@
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-runtime_2.10</artifactId>
+      <artifactId>flink-runtime_2.11</artifactId>
       <version>${flink.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
@@ -331,7 +331,7 @@
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-streaming-java_2.10</artifactId>
+      <artifactId>flink-streaming-java_2.11</artifactId>
       <version>${flink.version}</version>
       <scope>test</scope>
       <type>test-jar</type>
@@ -339,7 +339,7 @@
 
     <dependency>
       <groupId>org.apache.flink</groupId>
-      <artifactId>flink-test-utils_2.10</artifactId>
+      <artifactId>flink-test-utils_2.11</artifactId>
       <version>${flink.version}</version>
       <scope>test</scope>
       <exclusions>
diff --git a/runners/spark/build.gradle b/runners/spark/build.gradle
index 3f32da11e80..f0d1b755518 100644
--- a/runners/spark/build.gradle
+++ b/runners/spark/build.gradle
@@ -60,6 +60,7 @@ dependencies {
   shadow library.java.joda_time
   shadow library.java.commons_text
   shadow "io.dropwizard.metrics:metrics-core:3.1.2"
+  shadow "com.fasterxml.jackson.module:jackson-module-scala_2.11:2.8.9"
   provided library.java.spark_core
   provided library.java.spark_streaming
   provided library.java.spark_network_common
@@ -70,7 +71,7 @@ dependencies {
   provided library.java.commons_io_2x
   provided library.java.hamcrest_core
   provided "org.apache.zookeeper:zookeeper:3.4.6"
-  provided "org.scala-lang:scala-library:2.10.5"
+  provided "org.scala-lang:scala-library:2.11.8"
   provided "com.esotericsoftware.kryo:kryo:2.21"
   testCompile project(path: 
":beam-sdks-parent:beam-sdks-java-parent:beam-sdks-java-io-parent:beam-sdks-java-io-kafka",
 configuration: "shadow")
   testCompile project(path: 
":beam-sdks-parent:beam-sdks-java-parent:beam-sdks-java-core", configuration: 
"shadowTest")
@@ -81,7 +82,7 @@ dependencies {
   testCompile library.java.junit
   testCompile library.java.mockito_core
   testCompile library.java.jackson_dataformat_yaml
-  testCompile "org.apache.kafka:kafka_2.10:0.9.0.1"
+  testCompile "org.apache.kafka:kafka_2.11:0.11.0.1"
 }
 
 configurations.testRuntimeClasspath {
diff --git a/runners/spark/pom.xml b/runners/spark/pom.xml
index c4dc4f4f9d2..3670cdeb3ed 100644
--- a/runners/spark/pom.xml
+++ b/runners/spark/pom.xml
@@ -34,7 +34,7 @@
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
-    <kafka.version>0.9.0.1</kafka.version>
+    <kafka.version>0.11.0.1</kafka.version>
     <dropwizard.metrics.version>3.1.2</dropwizard.metrics.version>
   </properties>
 
@@ -136,17 +136,17 @@
   <dependencies>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_2.10</artifactId>
+      <artifactId>spark-core_2.11</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_2.10</artifactId>
+      <artifactId>spark-streaming_2.11</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-network-common_2.10</artifactId>
+      <artifactId>spark-network-common_2.11</artifactId>
       <scope>provided</scope>
     </dependency>
     <dependency>
@@ -159,17 +159,27 @@
           <groupId>jdk.tools</groupId>
           <artifactId>jdk.tools</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
       <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.servlet</groupId>
+          <artifactId>servlet-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
-      <groupId>com.esotericsoftware.kryo</groupId>
-      <artifactId>kryo</artifactId>
-      <version>2.21</version>
+      <groupId>com.esotericsoftware</groupId>
+      <artifactId>kryo-shaded</artifactId>
+      <version>3.0.3</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
@@ -189,6 +199,11 @@
       <groupId>com.fasterxml.jackson.core</groupId>
       <artifactId>jackson-annotations</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.module</groupId>
+      <artifactId>jackson-module-scala_2.11</artifactId>
+      <version>${jackson.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
@@ -231,7 +246,7 @@
     <dependency>
       <groupId>org.scala-lang</groupId>
       <artifactId>scala-library</artifactId>
-      <version>2.10.5</version>
+      <version>2.11.8</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
@@ -279,8 +294,26 @@
     <dependency>
       <groupId>org.apache.kafka</groupId>
       <artifactId>kafka-clients</artifactId>
-      <version>0.9.0.1</version>
+      <version>${kafka.clients.version}</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>net.jpountz.lz4</groupId>
+          <artifactId>lz4</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.kafka</groupId>
+      <artifactId>kafka_2.11</artifactId>
+      <version>${kafka.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>net.jpountz.lz4</groupId>
+          <artifactId>lz4</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <!-- test dependencies -->
@@ -300,12 +333,6 @@
       <artifactId>hamcrest-all</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.kafka</groupId>
-      <artifactId>kafka_2.10</artifactId>
-      <version>${kafka.version}</version>
-      <scope>test</scope>
-    </dependency>
 
     <!-- Depend on test jar to scan for ValidatesRunner tests -->
     <dependency>
@@ -424,7 +451,42 @@
             </execution>
           </executions>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-dependency-plugin</artifactId>
+          <configuration>
+            <ignoredUnusedDeclaredDependencies>
+              <ignoredUnusedDeclaredDependency>
+                com.fasterxml.jackson.module:jackson-module-scala_2.11
+              </ignoredUnusedDeclaredDependency>
+            </ignoredUnusedDeclaredDependencies>
+          </configuration>
+        </plugin>
       </plugins>
     </pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-enforcer-plugin</artifactId>
+          <executions>
+            <execution>
+              <id>enforce</id>
+              <goals>
+                <goal>enforce</goal>
+              </goals>
+              <configuration>
+                <rules>
+                  <enforceBytecodeVersion>
+                    <maxJdkVersion>1.8</maxJdkVersion>
+                  </enforceBytecodeVersion>
+                  <requireJavaVersion>
+                    <version>[1.8,)</version>
+                  </requireJavaVersion>
+                </rules>
+              </configuration>
+            </execution>
+          </executions>
+        </plugin>
+      </plugins>
   </build>
 </project>
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SparkUnboundedSource.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SparkUnboundedSource.java
index 26af0c02ef9..adcb56c340f 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SparkUnboundedSource.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SparkUnboundedSource.java
@@ -22,6 +22,8 @@
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.Collections;
+import java.util.Iterator;
+
 import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
 import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
 import org.apache.beam.runners.spark.SparkPipelineOptions;
@@ -309,8 +311,8 @@ public Metadata call(Tuple2<Iterable<byte[]>, Metadata> t2) 
throws Exception {
       implements FlatMapFunction<Tuple2<Iterable<byte[]>, Metadata>, byte[]> {
 
     @Override
-    public Iterable<byte[]> call(Tuple2<Iterable<byte[]>, Metadata> t2) throws 
Exception {
-      return t2._1();
+    public Iterator<byte[]> call(Tuple2<Iterable<byte[]>, Metadata> t2) throws 
Exception {
+      return t2._1().iterator();
     }
   }
 }
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
index 1fb8700fe25..b7f0083ced5 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkGroupAlsoByWindowViaWindowSet.java
@@ -534,7 +534,7 @@ public Boolean call(
                           keyCoder, wvCoder.getValueCoder(), 
wvCoder.getWindowCoder());
 
               @Override
-              public Iterable<WindowedValue<KV<K, Iterable<InputT>>>> call(
+              public java.util.Iterator<WindowedValue<KV<K, 
Iterable<InputT>>>> call(
                   final Tuple2<
                           /*K*/ ByteArray,
                           Tuple2<StateAndTimers, /*WV<KV<K, Itr<I>>>*/ 
List<byte[]>>>
@@ -542,7 +542,8 @@ public Boolean call(
                   throws Exception {
                 // drop the state since it is already persisted at this point.
                 // return in serialized form.
-                return CoderHelpers.fromByteArrays(t2._2()._2(), 
windowedValueKeyValueCoder);
+                return CoderHelpers.fromByteArrays(t2._2()._2(), 
windowedValueKeyValueCoder)
+                    .iterator();
               }
             });
   }
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkTimerInternals.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkTimerInternals.java
index 4fd8146d21c..f98f5154ddd 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkTimerInternals.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/stateful/SparkTimerInternals.java
@@ -23,6 +23,7 @@
 import com.google.common.collect.Sets;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -35,7 +36,6 @@
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.joda.time.Instant;
 
-
 /**
  * An implementation of {@link TimerInternals} for the SparkRunner.
  */
@@ -101,8 +101,9 @@ public static SparkTimerInternals global(Map<Integer, 
SparkWatermarks> watermark
     return timers;
   }
 
-  void addTimers(Iterable<TimerData> timers) {
-    for (TimerData timer: timers) {
+  void addTimers(Iterator<TimerData> timers) {
+    while (timers.hasNext()) {
+      TimerData timer = timers.next();
       this.timers.add(timer);
     }
   }
@@ -168,9 +169,9 @@ public void deleteTimer(StateNamespace namespace, String 
timerId) {
     return CoderHelpers.toByteArrays(timers, timerDataCoder);
   }
 
-  public static Iterable<TimerData> deserializeTimers(
+  public static Iterator<TimerData> deserializeTimers(
       Collection<byte[]> serTimers, TimerDataCoder timerDataCoder) {
-    return CoderHelpers.fromByteArrays(serTimers, timerDataCoder);
+    return CoderHelpers.fromByteArrays(serTimers, timerDataCoder).iterator();
   }
 
   @Override
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
index 72995833982..bbb78f6c71a 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/MultiDoFnFunction.java
@@ -101,7 +101,7 @@ public MultiDoFnFunction(
   }
 
   @Override
-  public Iterable<Tuple2<TupleTag<?>, WindowedValue<?>>> call(
+  public Iterator<Tuple2<TupleTag<?>, WindowedValue<?>>> call(
       Iterator<WindowedValue<InputT>> iter) throws Exception {
 
     DoFnOutputManager outputManager = new DoFnOutputManager();
@@ -151,7 +151,8 @@ public TimerInternals timerInternals() {
     return new SparkProcessContext<>(
         doFn, doFnRunnerWithMetrics, outputManager,
         stateful ? new TimerDataIterator(timerInternals) :
-            
Collections.<TimerInternals.TimerData>emptyIterator()).processPartition(iter);
+            
Collections.<TimerInternals.TimerData>emptyIterator()).processPartition(iter)
+              .iterator();
   }
 
   private static class TimerDataIterator implements 
Iterator<TimerInternals.TimerData> {
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
index fcf438c96fd..e5b66e0e854 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/SparkGroupAlsoByWindowViaOutputBufferFn.java
@@ -20,6 +20,7 @@
 
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Iterator;
 import java.util.List;
 import 
org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupAlsoByWindow;
 import org.apache.beam.runners.core.InMemoryTimerInternals;
@@ -71,7 +72,7 @@ public SparkGroupAlsoByWindowViaOutputBufferFn(
   }
 
   @Override
-  public Iterable<WindowedValue<KV<K, Iterable<InputT>>>> call(
+  public Iterator<WindowedValue<KV<K, Iterable<InputT>>>> call(
       WindowedValue<KV<K, Iterable<WindowedValue<InputT>>>> windowedValue) 
throws Exception {
     K key = windowedValue.getValue().getKey();
     Iterable<WindowedValue<InputT>> values = 
windowedValue.getValue().getValue();
@@ -115,7 +116,7 @@ public SparkGroupAlsoByWindowViaOutputBufferFn(
 
     reduceFnRunner.persist();
 
-    return outputter.getOutputs();
+    return outputter.getOutputs().iterator();
   }
 
   private void fireEligibleTimers(InMemoryTimerInternals timerInternals,
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TranslationUtils.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TranslationUtils.java
index 90f5ee3b2e0..42867b6e992 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TranslationUtils.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TranslationUtils.java
@@ -149,7 +149,7 @@ public T2 call(Tuple2<T1, T2> v1) throws Exception {
   public static <K, V> PairFlatMapFunction<Iterator<KV<K, V>>, K, V> 
toPairFlatMapFunction() {
     return new PairFlatMapFunction<Iterator<KV<K, V>>, K, V>() {
       @Override
-      public Iterable<Tuple2<K, V>> call(final Iterator<KV<K, V>> itr) {
+      public Iterator<Tuple2<K, V>> call(final Iterator<KV<K, V>> itr) {
         final Iterator<Tuple2<K, V>> outputItr =
             Iterators.transform(
                 itr,
@@ -160,13 +160,7 @@ public T2 call(Tuple2<T1, T2> v1) throws Exception {
                     return new Tuple2<>(kv.getKey(), kv.getValue());
                   }
                 });
-        return new Iterable<Tuple2<K, V>>() {
-
-          @Override
-          public Iterator<Tuple2<K, V>> iterator() {
-            return outputItr;
-          }
-        };
+        return outputItr;
       }
     };
   }
@@ -185,7 +179,7 @@ public T2 call(Tuple2<T1, T2> v1) throws Exception {
   static <K, V> FlatMapFunction<Iterator<Tuple2<K, V>>, KV<K, V>> 
fromPairFlatMapFunction() {
     return new FlatMapFunction<Iterator<Tuple2<K, V>>, KV<K, V>>() {
       @Override
-      public Iterable<KV<K, V>> call(Iterator<Tuple2<K, V>> itr) {
+      public Iterator<KV<K, V>> call(Iterator<Tuple2<K, V>> itr) {
         final Iterator<KV<K, V>> outputItr =
             Iterators.transform(
                 itr,
@@ -195,12 +189,7 @@ public T2 call(Tuple2<T1, T2> v1) throws Exception {
                     return KV.of(t2._1(), t2._2());
                   }
                 });
-        return new Iterable<KV<K, V>>() {
-          @Override
-          public Iterator<KV<K, V>> iterator() {
-            return outputItr;
-          }
-        };
+        return outputItr;
       }
     };
   }
@@ -351,7 +340,7 @@ public void call(T t) throws Exception {
     return new PairFlatMapFunction<Iterator<T>, K, V>() {
 
       @Override
-      public Iterable<Tuple2<K, V>> call(Iterator<T> itr) throws Exception {
+      public Iterator<Tuple2<K, V>> call(Iterator<T> itr) throws Exception {
         final Iterator<Tuple2<K, V>> outputItr =
             Iterators.transform(
                 itr,
@@ -366,13 +355,7 @@ public void call(T t) throws Exception {
                     }
                   }
                 });
-        return new Iterable<Tuple2<K, V>>() {
-
-          @Override
-          public Iterator<Tuple2<K, V>> iterator() {
-            return outputItr;
-          }
-        };
+        return outputItr;
       }
     };
   }
@@ -394,7 +377,7 @@ public void call(T t) throws Exception {
     return new FlatMapFunction<Iterator<InputT>, OutputT>() {
 
       @Override
-      public Iterable<OutputT> call(Iterator<InputT> itr) throws Exception {
+      public Iterator<OutputT> call(Iterator<InputT> itr) throws Exception {
         final Iterator<OutputT> outputItr =
             Iterators.transform(
                 itr,
@@ -409,13 +392,7 @@ public OutputT apply(InputT t) {
                     }
                   }
                 });
-        return new Iterable<OutputT>() {
-
-          @Override
-          public Iterator<OutputT> iterator() {
-            return outputItr;
-          }
-        };
+        return outputItr;
       }
     };
   }
diff --git 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/util/GlobalWatermarkHolder.java
 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/util/GlobalWatermarkHolder.java
index 8ad3ca41533..101bb33cc55 100644
--- 
a/runners/spark/src/main/java/org/apache/beam/runners/spark/util/GlobalWatermarkHolder.java
+++ 
b/runners/spark/src/main/java/org/apache/beam/runners/spark/util/GlobalWatermarkHolder.java
@@ -39,7 +39,6 @@
 import org.apache.spark.storage.BlockId;
 import org.apache.spark.storage.BlockManager;
 import org.apache.spark.storage.BlockResult;
-import org.apache.spark.storage.BlockStore;
 import org.apache.spark.storage.StorageLevel;
 import org.apache.spark.streaming.api.java.JavaBatchInfo;
 import org.apache.spark.streaming.api.java.JavaStreamingListener;
@@ -48,9 +47,11 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Option;
+import scala.collection.Iterator;
+import scala.reflect.ClassTag;
 
 /**
- * A {@link BlockStore} variable to hold the global watermarks for a 
micro-batch.
+ * A store to hold the global watermarks for a micro-batch.
  *
  * <p>For each source, holds a queue for the watermarks of each micro-batch 
that was read,
  * and advances the watermarks according to the queue (first-in-first-out).
@@ -61,6 +62,8 @@
 
   private static final Map<Integer, Queue<SparkWatermarks>> sourceTimes = new 
HashMap<>();
   private static final BlockId WATERMARKS_BLOCK_ID = 
BlockId.apply("broadcast_0WATERMARKS");
+  private static final ClassTag<Map> WATERMARKS_TAG =
+      scala.reflect.ClassManifestFactory.fromClass(Map.class);
 
   // a local copy of the watermarks is stored on the driver node so that it 
can be
   // accessed in test mode instead of fetching blocks remotely
@@ -245,7 +248,8 @@ private static void writeRemoteWatermarkBlock(
     blockManager.removeBlock(WATERMARKS_BLOCK_ID, true);
     // if an executor tries to fetch the watermark block here, it will fail to 
do so since
     // the watermark block has just been removed, but the new copy has not 
been put yet.
-    blockManager.putSingle(WATERMARKS_BLOCK_ID, newWatermarks, 
StorageLevel.MEMORY_ONLY(), true);
+    blockManager.putSingle(WATERMARKS_BLOCK_ID, newWatermarks, 
StorageLevel.MEMORY_ONLY(),
+        true, WATERMARKS_TAG);
     // if an executor tries to fetch the watermark block here, it still may 
fail to do so since
     // the put operation might not have been executed yet
     // see also https://issues.apache.org/jira/browse/BEAM-2789
@@ -262,7 +266,8 @@ private static void writeRemoteWatermarkBlock(
           WATERMARKS_BLOCK_ID,
           empty,
           StorageLevel.MEMORY_ONLY(),
-          true);
+          true,
+          WATERMARKS_TAG);
       return empty;
     } else {
       return watermarks;
@@ -270,9 +275,16 @@ private static void writeRemoteWatermarkBlock(
   }
 
   private static Map<Integer, SparkWatermarks> 
fetchSparkWatermarks(BlockManager blockManager) {
-    final Option<BlockResult> blockResultOption = 
blockManager.getRemote(WATERMARKS_BLOCK_ID);
+    final Option<BlockResult> blockResultOption = 
blockManager.get(WATERMARKS_BLOCK_ID,
+        WATERMARKS_TAG);
     if (blockResultOption.isDefined()) {
-      return (Map<Integer, SparkWatermarks>) 
blockResultOption.get().data().next();
+      Iterator<Object> data = blockResultOption.get().data();
+      Map<Integer, SparkWatermarks> next = (Map<Integer, SparkWatermarks>) 
data.next();
+      // Spark 2 only triggers completion at the end of the iterator.
+      while (data.hasNext()) {
+        // NO-OP
+      }
+      return next;
     } else {
       return null;
     }
diff --git a/sdks/java/io/hadoop/jdk1.8-tests/build.gradle 
b/sdks/java/io/hadoop/jdk1.8-tests/build.gradle
index 14ea8975dd3..e2ca9ea87c1 100644
--- a/sdks/java/io/hadoop/jdk1.8-tests/build.gradle
+++ b/sdks/java/io/hadoop/jdk1.8-tests/build.gradle
@@ -50,6 +50,7 @@ dependencies {
   shadow project(path: 
":beam-sdks-parent:beam-sdks-java-parent:beam-sdks-java-io-parent:beam-sdks-java-io-hadoop-parent:beam-sdks-java-io-hadoop-input-format",
 configuration: "shadow")
   shadow library.java.slf4j_api
   shadow "org.elasticsearch:elasticsearch-hadoop:$elastic_search_version"
+  shadow "com.fasterxml.jackson.module:jackson-module-scala_2.11:2.8.9"
   shadow library.java.commons_io_2x
   provided library.java.hadoop_common
   provided library.java.hadoop_mapreduce_client_core
diff --git a/sdks/java/io/hadoop/jdk1.8-tests/pom.xml 
b/sdks/java/io/hadoop/jdk1.8-tests/pom.xml
index 550d31d00e9..f148f13bf5c 100644
--- a/sdks/java/io/hadoop/jdk1.8-tests/pom.xml
+++ b/sdks/java/io/hadoop/jdk1.8-tests/pom.xml
@@ -34,6 +34,21 @@
   <description>Integration tests and junits which need JDK1.8.</description>
 
   <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-dependency-plugin</artifactId>
+          <configuration>
+            <ignoredUnusedDeclaredDependencies>
+              <ignoredUnusedDeclaredDependency>
+                com.fasterxml.jackson.module:jackson-module-scala_2.11
+              </ignoredUnusedDeclaredDependency>
+            </ignoredUnusedDeclaredDependencies>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
     <plugins>
       <!-- Overridden enforcer plugin for JDK1.8 for running tests -->
       <plugin>
@@ -106,12 +121,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-core_2.10</artifactId>
+          <artifactId>spark-core_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
@@ -156,6 +171,11 @@
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
     </dependency>
+    <dependency>
+      <groupId>com.fasterxml.jackson.module</groupId>
+      <artifactId>jackson-module-scala_2.11</artifactId>
+      <version>2.8.9</version>
+    </dependency>
 
     <!-- compile dependencies -->
     <dependency>
diff --git a/sdks/java/io/jdbc/pom.xml b/sdks/java/io/jdbc/pom.xml
index dd4fbabd9fc..e6bb357a089 100644
--- a/sdks/java/io/jdbc/pom.xml
+++ b/sdks/java/io/jdbc/pom.xml
@@ -48,12 +48,12 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-core_2.10</artifactId>
+          <artifactId>spark-core_2.11</artifactId>
           <scope>runtime</scope>
           <exclusions>
             <exclusion>
diff --git a/sdks/java/javadoc/pom.xml b/sdks/java/javadoc/pom.xml
index 85440ffe1e0..1fb4a6786dd 100644
--- a/sdks/java/javadoc/pom.xml
+++ b/sdks/java/javadoc/pom.xml
@@ -64,7 +64,7 @@
 
     <dependency>
       <groupId>org.apache.beam</groupId>
-      <artifactId>beam-runners-flink_2.10</artifactId>
+      <artifactId>beam-runners-flink_2.11</artifactId>
     </dependency>
 
     <dependency>
@@ -235,12 +235,12 @@
     <!-- optional -->
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_2.10</artifactId>
+      <artifactId>spark-core_2.11</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.spark</groupId>
-      <artifactId>spark-streaming_2.10</artifactId>
+      <artifactId>spark-streaming_2.11</artifactId>
     </dependency>
   </dependencies>
 
diff --git 
a/sdks/java/maven-archetypes/examples-java8/src/main/resources/archetype-resources/pom.xml
 
b/sdks/java/maven-archetypes/examples-java8/src/main/resources/archetype-resources/pom.xml
index ffdb0660466..4a35cb14497 100644
--- 
a/sdks/java/maven-archetypes/examples-java8/src/main/resources/archetype-resources/pom.xml
+++ 
b/sdks/java/maven-archetypes/examples-java8/src/main/resources/archetype-resources/pom.xml
@@ -215,7 +215,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-flink_2.10</artifactId>
+          <artifactId>beam-runners-flink_2.11</artifactId>
           <version>${beam.version}</version>
           <scope>runtime</scope>
         </dependency>
@@ -254,7 +254,7 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <version>${spark.version}</version>
           <scope>runtime</scope>
           <exclusions>
@@ -266,7 +266,7 @@
         </dependency>
         <dependency>
           <groupId>com.fasterxml.jackson.module</groupId>
-          <artifactId>jackson-module-scala_2.10</artifactId>
+          <artifactId>jackson-module-scala_2.11</artifactId>
           <version>${jackson.version}</version>
           <scope>runtime</scope>
         </dependency>
diff --git 
a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/pom.xml
 
b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/pom.xml
index b8b9c9f0fa4..5aa80ff2591 100644
--- 
a/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/pom.xml
+++ 
b/sdks/java/maven-archetypes/examples/src/main/resources/archetype-resources/pom.xml
@@ -214,7 +214,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-flink_2.10</artifactId>
+          <artifactId>beam-runners-flink_2.11</artifactId>
           <version>${beam.version}</version>
           <scope>runtime</scope>
         </dependency>
@@ -240,7 +240,7 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <version>${spark.version}</version>
           <scope>runtime</scope>
           <exclusions>
@@ -252,7 +252,7 @@
         </dependency>
         <dependency>
           <groupId>com.fasterxml.jackson.module</groupId>
-          <artifactId>jackson-module-scala_2.10</artifactId>
+          <artifactId>jackson-module-scala_2.11</artifactId>
           <version>${jackson.version}</version>
           <scope>runtime</scope>
         </dependency>
diff --git a/sdks/java/nexmark/pom.xml b/sdks/java/nexmark/pom.xml
index 8210ddc0474..15f51049ec9 100644
--- a/sdks/java/nexmark/pom.xml
+++ b/sdks/java/nexmark/pom.xml
@@ -65,7 +65,7 @@
       <dependencies>
         <dependency>
           <groupId>org.apache.beam</groupId>
-          <artifactId>beam-runners-flink_2.10</artifactId>
+          <artifactId>beam-runners-flink_2.11</artifactId>
           <scope>runtime</scope>
         </dependency>
       </dependencies>
@@ -81,13 +81,13 @@
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-streaming_2.10</artifactId>
+          <artifactId>spark-streaming_2.11</artifactId>
           <version>${spark.version}</version>
           <scope>runtime</scope>
         </dependency>
         <dependency>
           <groupId>org.apache.spark</groupId>
-          <artifactId>spark-core_2.10</artifactId>
+          <artifactId>spark-core_2.11</artifactId>
           <version>${spark.version}</version>
           <scope>runtime</scope>
           <exclusions>
diff --git a/settings.gradle b/settings.gradle
index 1ed6564cbca..ad98f87fc56 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -80,7 +80,7 @@ include 
':beam-runners-parent:beam-runners-reference-parent:beam-runners-referen
 include 
':beam-runners-parent:beam-runners-reference-parent:beam-runners-reference-job-orchestrator'
 include ':beam-runners-parent:beam-runners-reference-parent'
 include ':beam-runners-parent:beam-runners-direct-java'
-include ':beam-runners-parent:beam-runners-flink_2.10'
+include ':beam-runners-parent:beam-runners-flink_2.11'
 include ':beam-runners-parent:beam-runners-google-cloud-dataflow-java'
 include ':beam-runners-parent:beam-runners-local-java-core'
 include ':beam-runners-parent:beam-runners-spark'
@@ -158,7 +158,7 @@ 
project(':beam-runners-parent:beam-runners-reference-parent:beam-runners-referen
 
project(':beam-runners-parent:beam-runners-reference-parent:beam-runners-reference-job-orchestrator').projectDir
 = "$rootDir/runners/reference/job-server" as File
 project(':beam-runners-parent:beam-runners-reference-parent').projectDir = 
"$rootDir/runners/reference" as File
 project(':beam-runners-parent:beam-runners-direct-java').projectDir = 
"$rootDir/runners/direct-java" as File
-project(':beam-runners-parent:beam-runners-flink_2.10').projectDir = 
"$rootDir/runners/flink" as File
+project(':beam-runners-parent:beam-runners-flink_2.11').projectDir = 
"$rootDir/runners/flink" as File
 
project(':beam-runners-parent:beam-runners-google-cloud-dataflow-java').projectDir
 = "$rootDir/runners/google-cloud-dataflow-java" as File
 project(':beam-runners-parent:beam-runners-local-java-core').projectDir = 
"$rootDir/runners/local-java" as File
 project(':beam-runners-parent:beam-runners-spark').projectDir = 
"$rootDir/runners/spark" as File


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


> Add Spark 2.x support in Spark runner
> -------------------------------------
>
>                 Key: BEAM-1920
>                 URL: https://issues.apache.org/jira/browse/BEAM-1920
>             Project: Beam
>          Issue Type: Improvement
>          Components: runner-spark
>            Reporter: Jean-Baptiste Onofré
>            Assignee: Jean-Baptiste Onofré
>
> I have a branch working with both Spark 1 and Spark 2 backend.
> I'm preparing a pull request about that.



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)

Reply via email to