[ https://issues.apache.org/jira/browse/BEAM-6102?focusedWorklogId=170097&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-170097 ]

ASF GitHub Bot logged work on BEAM-6102:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 28/Nov/18 02:19
            Start Date: 28/Nov/18 02:19
    Worklog Time Spent: 10m 
      Work Description: lukecwik closed pull request #7145: [BEAM-6102] Fix several packages that were being bundled without relocation within the Dataflow worker.
URL: https://github.com/apache/beam/pull/7145
 
 
   

This is a PR merged from a forked repository. As GitHub hides the original
diff of a foreign (forked) pull request on merge, it is reproduced below for
the sake of provenance.
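For context on the PR title: "bundled without relocation" is Gradle Shadow plugin
terminology. A dependency that is merged into a fat jar without a matching
relocate() rule keeps its original package names, so the bundled copy can clash
with (or be loaded in place of) the copy on the user's classpath. A minimal
sketch of the include/relocate pairing, assuming the Shadow plugin (the artifact
version and the relocated package prefix are illustrative placeholders, not
taken from this PR):

{code:groovy}
// Minimal sketch, assuming the Gradle Shadow plugin is on the buildscript
// classpath. Artifact version and target package are illustrative.
apply plugin: 'com.github.johnrengelman.shadow'

shadowJar {
    dependencies {
        // Bundle jetty-server into the shaded jar...
        include(dependency("org.eclipse.jetty:jetty-server:9.2.10.v20150310"))
    }
    // ...and rewrite its packages into a private namespace so the bundled copy
    // cannot collide with a Jetty version supplied by the user's pipeline.
    // Omitting this relocate() rule is the "bundled without relocation" bug.
    relocate("org.eclipse.jetty", "repackaged.worker.org.eclipse.jetty")
}
{code}

The actual include/relocate pairs added by this PR are in the
legacy-worker/build.gradle hunk of the diff below: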

diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
index ba17983856ce..38c52add0407 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
@@ -101,6 +101,14 @@ class BeamModulePlugin implements Plugin<Project> {
      */
     boolean validateShadowJar = true
 
+    /**
+     * The set of excludes that should be used during validation of the shadow jar. Projects should override
+     * the default with the most specific set of excludes that is valid for the contents of its shaded jar.
+     *
+     * By default we exclude any class underneath the org.apache.beam namespace.
+     */
+    List<String> shadowJarValidationExcludes = ["org/apache/beam/**"]
+
     /**
      * The shadowJar / shadowTestJar tasks execute the following closure to configure themselves.
      * Users can compose their closure with the default closure via:
@@ -117,6 +125,17 @@ class BeamModulePlugin implements Plugin<Project> {
     boolean publish = true
   }
 
+  /** A class defining the set of configurable properties accepted by applyPortabilityNature. */
+  class PortabilityNatureConfiguration {
+    /**
+     * The set of excludes that should be used during validation of the shadow jar. Projects should override
+     * the default with the most specific set of excludes that is valid for the contents of its shaded jar.
+     *
+     * By default we exclude any class underneath the org.apache.beam namespace.
+     */
+    List<String> shadowJarValidationExcludes = ["org/apache/beam/**"]
+  }
+
   // A class defining the set of configurable properties for createJavaExamplesArchetypeValidationTask
   class JavaExamplesArchetypeValidationConfiguration {
     // Type [Quickstart, MobileGaming] for the postrelease validation is required.
@@ -759,7 +778,7 @@ class BeamModulePlugin implements Plugin<Project> {
       }
 
       if (configuration.validateShadowJar) {
-        project.task('validateShadedJarDoesntLeakNonOrgApacheBeamClasses', dependsOn: 'shadowJar') {
+        project.task('validateShadedJarDoesntLeakNonProjectClasses', dependsOn: 'shadowJar') {
          ext.outFile = project.file("${project.reportsDir}/${name}.out")
          inputs.files project.configurations.shadow.artifacts.files
          outputs.files outFile
@@ -767,19 +786,21 @@ class BeamModulePlugin implements Plugin<Project> {
            project.configurations.shadow.artifacts.files.each {
              FileTree exposedClasses = project.zipTree(it).matching {
                include "**/*.class"
-                exclude "org/apache/beam/**"
                // BEAM-5919: Exclude paths for Java 9 multi-release jars.
                exclude "META-INF/versions/*/module-info.class"
-                exclude "META-INF/versions/*/org/apache/beam/**"
+                configuration.shadowJarValidationExcludes.each {
+                  exclude "$it"
+                  exclude "META-INF/versions/*/$it"
+                }
              }
              outFile.text = exposedClasses.files
              if (exposedClasses.files) {
-                throw new GradleException("$it exposed classes outside of org.apache.beam namespace: ${exposedClasses.files}")
+                throw new GradleException("$it exposed classes outside of ${configuration.shadowJarValidationExcludes}: ${exposedClasses.files}")
              }
            }
          }
        }
-        project.tasks.check.dependsOn project.tasks.validateShadedJarDoesntLeakNonOrgApacheBeamClasses
+        project.tasks.check.dependsOn project.tasks.validateShadedJarDoesntLeakNonProjectClasses
      }
 
      if ((isRelease(project) || project.hasProperty('publishing')) &&
@@ -1358,14 +1379,19 @@ artifactId=${project.name}
 
    project.ext.applyPortabilityNature = {
      println "applyPortabilityNature with " + (it ? "$it" : "default configuration") + " for project $project.name"
-      project.ext.applyJavaNature(enableFindbugs: false, shadowClosure: GrpcVendoring.shadowClosure() << {
-        // We perform all the code relocations but don't include
-        // any of the actual dependencies since they will be supplied
-        // by org.apache.beam:beam-vendor-grpc-v1_13_1:0.1
-        dependencies {
-          exclude(dependency(".*:.*"))
-        }
-      })
+      PortabilityNatureConfiguration configuration = it ? it as PortabilityNatureConfiguration : new PortabilityNatureConfiguration()
+
+      project.ext.applyJavaNature(
+              enableFindbugs: false,
+              shadowJarValidationExcludes: it.shadowJarValidationExcludes,
+              shadowClosure: GrpcVendoring.shadowClosure() << {
+                // We perform all the code relocations but don't include
+                // any of the actual dependencies since they will be supplied
+                // by org.apache.beam:beam-vendor-grpc-v1_13_1:0.1
+                dependencies {
+                  include(dependency { return false })
+                }
+              })
 
      // Don't force modules here because we don't want to take the shared declarations in build_rules.gradle
      // because we would like to have the freedom to choose which versions of dependencies we
@@ -1399,22 +1425,6 @@ artifactId=${project.name}
      }
 
      project.dependencies GrpcVendoring.dependenciesClosure() << { shadow 'org.apache.beam:beam-vendor-grpc-1_13_1:0.1' }
-
-      project.task('validateShadedJarDoesntExportVendoredDependencies', dependsOn: 'shadowJar') {
-        ext.outFile = project.file("${project.reportsDir}/${name}.out")
-        inputs.files project.configurations.shadow.artifacts.files
-        outputs.files outFile
-        doLast {
-          project.configurations.shadow.artifacts.files.each {
-            FileTree exportedClasses = project.zipTree(it).matching { include "org/apache/beam/vendor/**" }
-            outFile.text = exportedClasses.files
-            if (exportedClasses.files) {
-              throw new GradleException("$it exported classes inside of org.apache.beam.vendor namespace: ${exportedClasses.files}")
-            }
-          }
-        }
-      }
-      project.tasks.check.dependsOn project.tasks.validateShadedJarDoesntExportVendoredDependencies
    }
 
    /** ***********************************************************************************************/
diff --git a/model/fn-execution/build.gradle b/model/fn-execution/build.gradle
index 3194fcf1279f..faf674c8aabf 100644
--- a/model/fn-execution/build.gradle
+++ b/model/fn-execution/build.gradle
@@ -17,7 +17,7 @@
  */
 
 apply plugin: org.apache.beam.gradle.BeamModulePlugin
-applyPortabilityNature()
+applyPortabilityNature(shadowJarValidationExcludes: ["org/apache/beam/model/fnexecution/v1/**"])
 
 description = "Apache Beam :: Model :: Fn Execution"
 ext.summary = "Portable definitions for execution user-defined functions."
@@ -25,6 +25,6 @@ ext.summary = "Portable definitions for execution user-defined functions."
 dependencies {
   // We purposely depend on the unshaded classes for protobuf compilation and
   // export the shaded variant as the actual runtime dependency.
-  protobuf project(path: ":beam-model-pipeline", configuration: "unshaded")
+  compile project(path: ":beam-model-pipeline", configuration: "unshaded")
   runtime project(path: ":beam-model-pipeline", configuration: "shadow")
 }
diff --git a/model/job-management/build.gradle b/model/job-management/build.gradle
index e453e33ba096..4c50782240e2 100644
--- a/model/job-management/build.gradle
+++ b/model/job-management/build.gradle
@@ -17,7 +17,7 @@
  */
 
 apply plugin: org.apache.beam.gradle.BeamModulePlugin
-applyPortabilityNature()
+applyPortabilityNature(shadowJarValidationExcludes: ["org/apache/beam/model/jobmanagement/v1/**"])
 
 description = "Apache Beam :: Model :: Job Management"
 ext.summary = "Portable definitions for submitting pipelines."
@@ -25,6 +25,6 @@ ext.summary = "Portable definitions for submitting pipelines."
 dependencies {
   // We purposely depend on the unshaded classes for protobuf compilation and
   // export the shaded variant as the actual runtime dependency.
-  protobuf project(path: ":beam-model-pipeline", configuration: "unshaded")
+  compile project(path: ":beam-model-pipeline", configuration: "unshaded")
   runtime project(path: ":beam-model-pipeline", configuration: "shadow")
 }
diff --git a/model/pipeline/build.gradle b/model/pipeline/build.gradle
index 016a766392ed..2a72cd637713 100644
--- a/model/pipeline/build.gradle
+++ b/model/pipeline/build.gradle
@@ -17,7 +17,7 @@
  */
 
 apply plugin: org.apache.beam.gradle.BeamModulePlugin
-applyPortabilityNature()
+applyPortabilityNature(shadowJarValidationExcludes: ["org/apache/beam/model/pipeline/v1/**"])
 
 description = "Apache Beam :: Model :: Pipeline"
 ext.summary = "Portable definitions for building pipelines"
diff --git a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
index 5718d11d0d5a..7551758df984 100644
--- a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
@@ -78,20 +78,72 @@ def excluded_dependencies = [
         library.java.junit,                          // Test only
 ]
 
-applyJavaNature(publish: false, enableFindbugs: false /* TODO(BEAM-5658): enable findbugs */, validateShadowJar: false, shadowClosure: DEFAULT_SHADOW_CLOSURE << {
+applyJavaNature(
+        publish: false,
+        enableFindbugs: false /* TODO(BEAM-5658): enable findbugs */,
+        shadowJarValidationExcludes: [
+            "org/apache/beam/runners/dataflow/worker/**",
+            "org/apache/beam/repackaged/beam_runners_google_cloud_dataflow_java_legacy_worker/**",
+            // TODO(BEAM-6137): Move DataflowRunnerHarness class under org.apache.beam.runners.dataflow.worker namespace
+            "com/google/cloud/dataflow/worker/DataflowRunnerHarness.class",
+            // TODO(BEAM-6136): Enable relocation for conscrypt
+            "org/conscrypt/**",
+        ],
+        shadowClosure: DEFAULT_SHADOW_CLOSURE << {
+    // Each included dependency must also include all of its necessary transitive dependencies
+    // or have them provided by the users pipeline during job submission. Typically a users
+    // pipeline includes :beam-runners-google-cloud-dataflow-java and its transitive dependencies
+    // so those dependencies don't need to be shaded (bundled and relocated) away. All other
+    // dependencies needed to run the worker must be shaded (bundled and relocated) to prevent
+    // ClassNotFound and/or MethodNotFound errors during pipeline execution.
+    //
+    // Each included dependency should have a matching relocation rule below that ensures
+    // that the shaded jar is correctly built.
+
+    dependencies {
+        include(project(path: ":beam-model-fn-execution", configuration: "shadow"))
+    }
+    relocate("org.apache.beam.model.fnexecution.v1", getWorkerRelocatedPath("org.apache.beam.model.fnexecution.v1"))
+
+    dependencies {
+        include(project(path: ":beam-runners-core-construction-java", configuration: "shadow"))
+        include(project(path: ":beam-runners-core-java", configuration: "shadow"))
+    }
+    relocate("org.apache.beam.runners.core", getWorkerRelocatedPath("org.apache.beam.runners.core"))
+    relocate("org.apache.beam.repackaged.beam_runners_core_construction_java", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_core_construction_java"))
+    relocate("org.apache.beam.repackaged.beam_runners_core_java", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_core_java"))
+
+    dependencies {
+        include(project(path: ":beam-runners-java-fn-execution", configuration: "shadow"))
+    }
+    relocate("org.apache.beam.runners.fnexecution", getWorkerRelocatedPath("org.apache.beam.runners.fnexecution"))
+    relocate("org.apache.beam.repackaged.beam_runners_java_fn_execution", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_runners_java_fn_execution"))
+
+    dependencies {
+        include(project(path: ":beam-sdks-java-fn-execution", configuration: "shadow"))
+    }
+    relocate("org.apache.beam.sdk.fn", getWorkerRelocatedPath("org.apache.beam.sdk.fn"))
+    relocate("org.apache.beam.repackaged.beam_sdks_java_fn_execution", getWorkerRelocatedPath("org.apache.beam.repackaged.beam_sdks_java_fn_execution"))
+
+    // TODO(BEAM-6136): Enable relocation for conscrypt
+    dependencies {
+        include(dependency("org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"))
+    }
+
+    dependencies {
+        // We have to include jetty-server/jetty-servlet and all of its transitive dependencies
+        // which includes several org.eclipse.jetty artifacts + servlet-api
+        include(dependency("org.eclipse.jetty:.*:9.2.10.v20150310"))
+        include(dependency("javax.servlet:javax.servlet-api:3.1.0"))
+    }
+    relocate("org.eclipse.jetty", getWorkerRelocatedPath("org.eclipse.jetty"))
+    relocate("javax.servlet", getWorkerRelocatedPath("javax.servlet"))
+
+    // We don't relocate windmill since it is already underneath the org.apache.beam.runners.dataflow.worker namespace and never
+    // expect a user pipeline to include it. There is also a JNI component that windmill server relies on which makes
+    // arbitrary relocation more difficult.
     dependencies {
         include(project(path: ":beam-runners-google-cloud-dataflow-java-windmill", configuration: "shadow"))
-        include(dependency(".*:.*"))
-
-        sdk_provided_dependencies.each {
-            exclude(dependency(it))
-        }
-        sdk_provided_project_dependencies.each {
-            exclude(project(path: it, configuration: "shadow"))
-        }
-        excluded_dependencies.each {
-            exclude(dependency(it))
-        }
     }
 
     // Include original source files extracted under
@@ -100,47 +152,6 @@ applyJavaNature(publish: false, enableFindbugs: false /* TODO(BEAM-5658): enable
 
    exclude "META-INF/LICENSE.txt"
    exclude "about.html"
-
-    relocate("com.", getWorkerRelocatedPath("com.")) {
-        exclude "com.fasterxml.jackson.**"
-        exclude "com.google.api.client.**"
-        exclude "com.google.api.services.bigquery.**"
-        exclude "com.google.api.services.clouddebugger.**"
-        exclude "com.google.api.services.dataflow.**"
-        exclude "com.google.api.services.datastore.**"
-        exclude "com.google.api.services.pubsub.**"
-        exclude "com.google.api.services.storage.**"
-        exclude "com.google.auth.**"
-        exclude "com.google.cloud.dataflow.**"
-        exclude "com.sun.management*"
-        exclude "com.sun.management.**"
-    }
-    relocate("javax.servlet", getWorkerRelocatedPath("javax.servlet"))
-    relocate("io.", getWorkerRelocatedPath("io."))
-    relocate("okio.", getWorkerRelocatedPath("okio."))
-    relocate("org.", getWorkerRelocatedPath("org.")) {
-        // Exclude netty-tcnative from shading since gRPC relies on Netty to be able
-        // to load org.apache.tomcat.jni.SSL to provide an SSL context.
-        exclude "org.apache.avro.**"
-        exclude "org.apache.beam.**"
-        exclude "org.apache.tomcat.jni.**"
-        exclude "org.conscrypt.**"
-        exclude "org.eclipse.jetty.alpn.**"
-        exclude "org.eclipse.jetty.npn.**"
-        exclude "org.hamcrest.**"
-        exclude "org.joda.time.**"
-        exclude "org.junit.**"
-        exclude "org.slf4j.**"
-        exclude "org.w3c.dom.**"
-    }
-    relocate("org.apache.beam.runners.core.construction.",
-            getWorkerRelocatedPath("org.")) {
-              // Exclude org.apache.beam.runners.core.construction.metrics
-              // because it is referenced by
-              // org.apache.beam.runners.core.metrics which is not relocated.
-              exclude "org.apache.beam.runners.core.construction.metrics.**"
-            }
-
 })
 
 /******************************************************************************/
@@ -184,7 +195,6 @@ dependencies {
    compile project(path: ":beam-sdks-java-fn-execution", configuration: "shadow")
    compile project(path: ":beam-runners-google-cloud-dataflow-java-windmill", configuration: "shadow")
    compile library.java.guava
-    compile "javax.servlet:javax.servlet-api:3.1.0"
    compile "org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"
    compile "org.eclipse.jetty:jetty-server:9.2.10.v20150310"
    compile "org.eclipse.jetty:jetty-servlet:9.2.10.v20150310"
diff --git a/runners/google-cloud-dataflow-java/worker/windmill/build.gradle b/runners/google-cloud-dataflow-java/worker/windmill/build.gradle
index 7939921ad55e..cd982e5993c8 100644
--- a/runners/google-cloud-dataflow-java/worker/windmill/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/windmill/build.gradle
@@ -17,7 +17,7 @@
  */
 
 apply plugin: org.apache.beam.gradle.BeamModulePlugin
-applyPortabilityNature()
+applyPortabilityNature(shadowJarValidationExcludes: ["org/apache/beam/runners/dataflow/worker/windmill/**"])
 
 description = "Apache Beam :: Runners :: Google Cloud Dataflow Java :: Windmill"
 ext.summary = "Windmill proto specifications"
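
A usage note on the new knob: shadowJarValidationExcludes feeds the renamed
validateShadedJarDoesntLeakNonProjectClasses task in BeamModulePlugin.groovy
above, which fails the build if the shaded jar exposes any class outside the
project-owned namespaces. Its effect can be sketched as a standalone Gradle
task (a minimal sketch; the jar path and exclude list are illustrative
placeholders, not Beam's actual configuration):

{code:groovy}
// Standalone sketch of the leak check: list every .class entry in a shaded jar
// that falls outside the allowed namespaces and fail the build if any exist.
// "build/libs/my-shaded.jar" and the exclude list are hypothetical.
def shadowJarValidationExcludes = ["org/apache/beam/model/pipeline/v1/**"]

task checkShadedJarLeaks {
    doLast {
        def jar = file("build/libs/my-shaded.jar")
        def exposed = zipTree(jar).matching {
            include "**/*.class"
            // Java 9 multi-release jars duplicate classes under META-INF/versions/.
            exclude "META-INF/versions/*/module-info.class"
            shadowJarValidationExcludes.each {
                exclude "$it"
                exclude "META-INF/versions/*/$it"
            }
        }
        // Anything left over escaped relocation and leaks into foreign namespaces.
        if (!exposed.files.isEmpty()) {
            throw new GradleException(
                    "$jar exposed classes outside of ${shadowJarValidationExcludes}: ${exposed.files}")
        }
    }
}
{code}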


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


Issue Time Tracking
-------------------

    Worklog Id:     (was: 170097)
    Time Spent: 3h 20m  (was: 3h 10m)

> Dataflow cannot deserialize DoFns - incompatible serialVersionUID (JDK or code version mismatch)
> ------------------------------------------------------------------------------------------------
>
>                 Key: BEAM-6102
>                 URL: https://issues.apache.org/jira/browse/BEAM-6102
>             Project: Beam
>          Issue Type: Bug
>          Components: runner-dataflow
>            Reporter: Ankur Goenka
>            Assignee: Luke Cwik
>            Priority: Blocker
>             Fix For: 2.9.0
>
>         Attachments: RunnerApi$BeamConstants$Constants.class, WindowingStrategy.class
>
>          Time Spent: 3h 20m
>  Remaining Estimate: 0h
>
> The wordcount example is broken on master.
> It's failing with the serialization error shown below.
>  
> {code:java}
> 11:08:59 AM: Executing task 'WordCount.main()'... Parallel execution is an 
> incubating feature. > Task :buildSrc:compileJava NO-SOURCE > Task 
> :buildSrc:compileGroovy UP-TO-DATE > Task :buildSrc:processResources 
> NO-SOURCE > Task :buildSrc:classes UP-TO-DATE > Task :buildSrc:jar UP-TO-DATE 
> > Task :buildSrc:assemble UP-TO-DATE > Task :buildSrc:spotlessGroovy 
> UP-TO-DATE > Task :buildSrc:spotlessGroovyCheck UP-TO-DATE > Task 
> :buildSrc:spotlessGroovyGradle UP-TO-DATE > Task 
> :buildSrc:spotlessGroovyGradleCheck UP-TO-DATE > Task :buildSrc:spotlessCheck 
> UP-TO-DATE > Task :buildSrc:compileTestJava NO-SOURCE > Task 
> :buildSrc:compileTestGroovy NO-SOURCE > Task :buildSrc:processTestResources 
> NO-SOURCE > Task :buildSrc:testClasses UP-TO-DATE > Task :buildSrc:test 
> NO-SOURCE > Task :buildSrc:check UP-TO-DATE > Task :buildSrc:build UP-TO-DATE 
> Parallel execution with configuration on demand is an incubating feature. > 
> Configure project :beam-model-pipeline applyPortabilityNature with default 
> configuration for project beam-model-pipeline > Configure project 
> :beam-model-job-management applyPortabilityNature with default configuration 
> for project beam-model-job-management > Configure project 
> :beam-model-fn-execution applyPortabilityNature with default configuration 
> for project beam-model-fn-execution > Task 
> :beam-examples-java:processResources NO-SOURCE > Task 
> :beam-sdks-java-core:generateAvroProtocol NO-SOURCE > Task 
> :beam-sdks-java-extensions-google-cloud-platform-core:processResources 
> NO-SOURCE > Task :beam-sdks-java-io-google-cloud-platform:processResources 
> NO-SOURCE > Task :beam-vendor-grpc-v1_13_1:compileJava NO-SOURCE > Task 
> :beam-runners-core-construction-java:processResources NO-SOURCE > Task 
> :beam-sdks-java-extensions-protobuf:extractProto UP-TO-DATE > Task 
> :beam-model-pipeline:extractProto UP-TO-DATE > Task 
> :beam-model-job-management:processResources UP-TO-DATE > Task 
> :beam-runners-core-java:processResources NO-SOURCE > Task 
> :beam-sdks-java-fn-execution:processResources NO-SOURCE > Task 
> :beam-sdks-java-harness:processResources NO-SOURCE > Task 
> :beam-vendor-sdks-java-extensions-protobuf:processResources NO-SOURCE > Task 
> :beam-sdks-java-extensions-protobuf:processResources NO-SOURCE > Task 
> :beam-sdks-java-core:generateAvroJava NO-SOURCE > Task 
> :beam-vendor-grpc-v1_13_1:processResources NO-SOURCE > Task 
> :beam-model-fn-execution:processResources UP-TO-DATE > Task 
> :beam-model-pipeline:processResources UP-TO-DATE > Task 
> :beam-runners-local-java-core:processResources NO-SOURCE > Task 
> :beam-runners-java-fn-execution:processResources NO-SOURCE > Task 
> :beam-runners-direct-java:processResources NO-SOURCE > Task 
> :beam-vendor-grpc-v1_13_1:classes UP-TO-DATE > Task 
> :beam-sdks-java-core:processResources UP-TO-DATE > Task 
> :beam-runners-google-cloud-dataflow-java:processResources UP-TO-DATE > Task 
> :beam-vendor-grpc-v1_13_1:shadowJar UP-TO-DATE > Task 
> :beam-model-fn-execution:extractIncludeProto UP-TO-DATE > Task 
> :beam-model-pipeline:extractIncludeProto UP-TO-DATE > Task 
> :beam-model-job-management:extractIncludeProto UP-TO-DATE > Task 
> :beam-model-pipeline:generateProto UP-TO-DATE > Task 
> :beam-model-pipeline:compileJava UP-TO-DATE > Task 
> :beam-model-pipeline:classes UP-TO-DATE > Task :beam-model-pipeline:shadowJar 
> UP-TO-DATE > Task :beam-model-pipeline:jar UP-TO-DATE > Task 
> :beam-model-job-management:extractProto UP-TO-DATE > Task 
> :beam-model-fn-execution:extractProto UP-TO-DATE > Task 
> :beam-model-fn-execution:generateProto UP-TO-DATE > Task 
> :beam-model-job-management:generateProto UP-TO-DATE > Task 
> :beam-model-job-management:compileJava UP-TO-DATE > Task 
> :beam-model-job-management:classes UP-TO-DATE > Task 
> :beam-model-fn-execution:compileJava UP-TO-DATE > Task 
> :beam-model-fn-execution:classes UP-TO-DATE > Task 
> :beam-model-job-management:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-core:compileJava UP-TO-DATE > Task 
> :beam-sdks-java-core:classes UP-TO-DATE > Task 
> :beam-model-fn-execution:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-core:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-extensions-protobuf:extractIncludeProto UP-TO-DATE > Task 
> :beam-sdks-java-extensions-protobuf:generateProto NO-SOURCE > Task 
> :beam-sdks-java-fn-execution:compileJava UP-TO-DATE > Task 
> :beam-sdks-java-fn-execution:classes UP-TO-DATE > Task 
> :beam-runners-local-java-core:compileJava UP-TO-DATE > Task 
> :beam-runners-local-java-core:classes UP-TO-DATE > Task 
> :beam-runners-core-construction-java:compileJava UP-TO-DATE > Task 
> :beam-sdks-java-extensions-google-cloud-platform-core:compileJava UP-TO-DATE 
> > Task :beam-sdks-java-extensions-google-cloud-platform-core:classes 
> UP-TO-DATE > Task :beam-sdks-java-extensions-protobuf:compileJava UP-TO-DATE 
> > Task :beam-sdks-java-extensions-protobuf:classes UP-TO-DATE > Task 
> :beam-vendor-sdks-java-extensions-protobuf:compileJava UP-TO-DATE > Task 
> :beam-vendor-sdks-java-extensions-protobuf:classes UP-TO-DATE > Task 
> :beam-runners-core-construction-java:classes UP-TO-DATE > Task 
> :beam-runners-local-java-core:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-fn-execution:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-extensions-protobuf:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-extensions-google-cloud-platform-core:shadowJar UP-TO-DATE > 
> Task :beam-vendor-sdks-java-extensions-protobuf:shadowJar UP-TO-DATE > Task 
> :beam-runners-core-construction-java:shadowJar UP-TO-DATE > Task 
> :beam-runners-core-java:compileJava UP-TO-DATE > Task 
> :beam-runners-core-java:classes UP-TO-DATE > Task 
> :beam-runners-core-java:shadowJar UP-TO-DATE > Task 
> :beam-sdks-java-io-google-cloud-platform:compileJava UP-TO-DATE > Task 
> :beam-sdks-java-io-google-cloud-platform:classes UP-TO-DATE > Task 
> :beam-sdks-java-harness:compileJava UP-TO-DATE > Task 
> :beam-sdks-java-harness:classes UP-TO-DATE > Task :beam-sdks-java-harness:jar 
> UP-TO-DATE > Task :beam-sdks-java-io-google-cloud-platform:shadowJar 
> UP-TO-DATE > Task :beam-sdks-java-harness:shadowJar UP-TO-DATE > Task 
> :beam-examples-java:compileJava UP-TO-DATE > Task :beam-examples-java:classes 
> UP-TO-DATE > Task :beam-runners-java-fn-execution:compileJava UP-TO-DATE > 
> Task :beam-runners-java-fn-execution:classes UP-TO-DATE > Task 
> :beam-runners-java-fn-execution:shadowJar UP-TO-DATE > Task 
> :beam-runners-google-cloud-dataflow-java:compileJava UP-TO-DATE > Task 
> :beam-runners-google-cloud-dataflow-java:classes UP-TO-DATE > Task 
> :beam-runners-direct-java:compileJava UP-TO-DATE > Task 
> :beam-runners-direct-java:classes UP-TO-DATE > Task 
> :beam-runners-google-cloud-dataflow-java:shadowJar UP-TO-DATE > Task 
> :beam-runners-direct-java:shadowJar UP-TO-DATE > Task 
> :beam-examples-java:WordCount.main() Nov 20, 2018 11:09:02 AM 
> org.apache.beam.sdk.extensions.gcp.options.GcpOptions$DefaultProjectFactory 
> create INFO: Inferred default GCP project 'google.com:clouddfe' from gcloud. 
> If this is the incorrect project, please cancel this Pipeline and specify the 
> command-line argument --project. Nov 20, 2018 11:09:02 AM 
> com.google.auth.oauth2.DefaultCredentialsProvider 
> warnAboutProblematicCredentials WARNING: Your application has authenticated 
> using end user credentials from Google Cloud SDK. We recommend that most 
> server applications use service accounts instead. If your application 
> continues to use end user credentials from Cloud SDK, you might receive a 
> "quota exceeded" or "API not enabled" error. For more information about 
> service accounts, see https://cloud.google.com/docs/authentication/. Nov 20, 
> 2018 11:09:03 AM 
> org.apache.beam.sdk.extensions.gcp.options.GcpOptions$GcpTempLocationFactory 
> tryCreateDefaultBucket INFO: No tempLocation specified, attempting to use 
> default bucket: dataflow-staging-us-central1-927334603519 Nov 20, 2018 
> 11:09:03 AM 
> org.apache.beam.sdk.util.RetryHttpRequestInitializer$LoggingHttpBackOffHandler
>  handleResponse WARNING: Request failed with code 409, performed 0 retries 
> due to IOExceptions, performed 0 retries due to unsuccessful status codes, 
> HTTP framework says request can be retried, (caller responsible for 
> retrying): 
> https://www.googleapis.com/storage/v1/b?predefinedAcl=projectPrivate&predefinedDefaultObjectAcl=projectPrivate&project=google.com:clouddfe
>  Nov 20, 2018 11:09:03 AM 
> org.apache.beam.runners.dataflow.options.DataflowPipelineOptions$StagingLocationFactory
>  create INFO: No stagingLocation provided, falling back to gcpTempLocation 
> Nov 20, 2018 11:09:04 AM org.apache.beam.runners.dataflow.DataflowRunner 
> fromOptions INFO: PipelineOptions.filesToStage was not specified. Defaulting 
> to files from the classpath: will stage 120 files. Enable logging at DEBUG 
> level to see which files will be staged. Nov 20, 2018 11:09:04 AM 
> org.apache.beam.runners.dataflow.DataflowRunner run INFO: Executing pipeline 
> on the Dataflow Service, which will have billing implications related to 
> Google Compute Engine usage and other Google Cloud Services. Nov 20, 2018 
> 11:09:04 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> stageClasspathElements INFO: Uploading 120 files from 
> PipelineOptions.filesToStage to staging location to prepare for execution. 
> Nov 20, 2018 11:09:04 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> stageClasspathElements WARNING: Skipping non-existent file to stage 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/examples/java/build/resources/main.
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client/1.27.0/52d6076f58edfedb55526c0cac4ba155c6dc645b/google-http-client-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-1.27.0-XiDeJfgpNiiCZxI1a4Q27Q.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.oauth-client/google-oauth-client-java6/1.27.0/90570eedf1ae6188ee5028c11ec423fe52336373/google-oauth-client-java6-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-oauth-client-java6-1.27.0-VWEWXu2J1auJGvb5eLfbhg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-pubsub/v1-rev20181105-1.27.0/31f49e03ec44c72845a6a0572ccba74d7aee1f57/google-api-services-pubsub-v1-rev20181105-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-pubsub-v1-rev20181105-1.27.0-_3JmrTY281LX4AcOiP7LkQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-dataflow/v1b3-rev20181107-1.27.0/d0b3764765916d65fd75a52ae1a8cf13a150450a/google-api-services-dataflow-v1b3-rev20181107-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-dataflow-v1b3-rev20181107-1.27.0-ZCpCwXOULzPs_S2gBBH7TA.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.oauth-client/google-oauth-client/1.27.0/1979e3b499f1ef49959005ca59101791361abca9/google-oauth-client-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-oauth-client-1.27.0-waNrxu589XHIvY73DhrxNA.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-clouddebugger/v2-rev20180801-1.27.0/2866c6cfca03ef7bbf687ba3b62fa843377e664b/google-api-services-clouddebugger-v2-rev20180801-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-clouddebugger-v2-rev20180801-1.27.0-xrIcl7yUGCktYEb9_xtvjQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client-jackson2/1.27.0/1e1a91b684fc2e6e97d2369c7d8d1d9efc115543/google-api-client-jackson2-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-jackson2-1.27.0-M222JGX1Wh3HoNtcvwCaEw.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client-java6/1.27.0/2c06247935819b429424797d9844aa33955f4fb0/google-api-client-java6-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-java6-1.27.0-zPlGOao_rbTkn6baqjfy2w.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/io/google-cloud-platform/build/libs/beam-sdks-java-io-google-cloud-platform-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-io-google-cloud-platform-2.9.0-SNAPSHOT-xFFgKvdT2GS2td3iSJaJqQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-bigquery/v2-rev20181104-1.27.0/d539fb4b7ac318cadc344579fa6f80ae6cb8070e/google-api-services-bigquery-v2-rev20181104-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-bigquery-v2-rev20181104-1.27.0-oUKA0dpY-zmhgM8DIAq8Zg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-protobuf/1.27.0/b81859b8a284ea416d74644a70524c4d6be3e9a/google-http-client-protobuf-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-protobuf-1.27.0-1E0HsNaSwDtqOduUs-NssA.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-jackson/1.27.0/2b5eff5b6a343e716db21673743576db9a53e5ae/google-http-client-jackson-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-jackson-1.27.0-XQrBFTlCGWMqVu9vUPkMcw.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client/1.27.0/7d498c11db92fb72521784212c953612a42e50db/google-api-client-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-1.27.0-UjH-dlvVLqF5D_4Trh8foQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-storage/v1-rev20181013-1.27.0/10593f99e06d962017b0663348ee1fed0f8528/google-api-services-storage-v1-rev20181013-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-storage-v1-rev20181013-1.27.0-Y65RPabM4FA03rQX5lUvKg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/model/job-management/build/libs/beam-model-job-management-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-model-job-management-2.9.0-SNAPSHOT-PRRkG0x941fKfrBa33f-Qg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-cloudresourcemanager/v1-rev20181015-1.27.0/354796285db3b2fce9089147db3fe1f2a8cca444/google-api-services-cloudresourcemanager-v1-rev20181015-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-cloudresourcemanager-v1-rev20181015-1.27.0-SQfpsiUEwOlTbzfnWC4EzA.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/model/pipeline/build/libs/beam-model-pipeline-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-model-pipeline-2.9.0-SNAPSHOT-XD9dbaFWaiUJnjDcCAkrOg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/org.apache.httpcomponents/httpcore/4.4.9/a86ce739e5a7175b4b234c290a00a5fdb80957a0/httpcore-4.4.9.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/httpcore-4.4.9-uJRVUHg5wJ1hGWYd79IWag.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/extensions/protobuf/build/libs/beam-sdks-java-extensions-protobuf-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-extensions-protobuf-2.9.0-SNAPSHOT-4q_iSCAMQwCGDowjT6QSKw.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/extensions/google-cloud-platform-core/build/libs/beam-sdks-java-extensions-google-cloud-platform-core-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-extensions-google-cloud-platform-core-2.9.0-SNAPSHOT-qVkigYLlAbU1ujEdaTQZ0Q.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/org.apache.httpcomponents/httpclient/4.5.5/1603dfd56ebcd583ccdf337b6c3984ac55d89e58/httpclient-4.5.5.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/httpclient-4.5.5-l-flsTVHa30lpasx4epJIg.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/core-construction-java/build/libs/beam-runners-core-construction-java-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-core-construction-java-2.9.0-SNAPSHOT-XbHTR4DoxxMKF5aSfjqWYQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-jackson2/1.27.0/25ee9240648b5dfd01a3385937bcf332b6f222a7/google-http-client-jackson2-1.27.0.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-jackson2-1.27.0-s26dP1SAvqbYDq_HiJ9Bbw.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/direct-java/build/libs/beam-runners-direct-java-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-direct-java-2.9.0-SNAPSHOT-jn9fS-cJcB_-dTRLAU-meQ.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/google-cloud-dataflow-java/build/libs/beam-runners-google-cloud-dataflow-java-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-google-cloud-dataflow-java-2.9.0-SNAPSHOT-oQdbiEtnM6C2iUC5axtRjA.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/examples/java/build/classes/java/main
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/main-cxzukVaCwjdrlMI-NQ8Jew.jar
>  Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/vendor/grpc-v1_13_1/build/libs/beam-vendor-grpc-v1_13_1-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-vendor-grpc-v1_13_1-2.9.0-SNAPSHOT-E8CXf40HkGDxZZU040A-bA.jar
>  Nov 20, 2018 11:09:06 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading 
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/core/build/libs/beam-sdks-java-core-2.9.0-SNAPSHOT.jar
>  to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-core-2.9.0-SNAPSHOT-Aoyvg0doZsYvr7jINSJAKA.jar
>  Nov 20, 2018 11:09:21 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> stageClasspathElements INFO: Staging files complete: 91 files cached, 28 
> files newly uploaded Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding ReadLines/Read as step s1 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.sdk.io.FileBasedSource getEstimatedSizeBytes INFO: 
> Filepattern gs://apache-beam-samples/shakespeare/kinglear.txt matched 1 files 
> with total size 157283 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding WordCount.CountWords/ParDo(ExtractWords) as step s2 Nov 
> 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding WordCount.CountWords/Count.PerElement/Init/Map as step 
> s3 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey as 
> step s4 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
>  as step s5 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding MapElements/Map as step s6 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign 
> as step s7 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles 
> as step s8 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten as 
> step s9 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten as 
> step s10 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum as step 
> s11 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/Flatten.PCollections 
> as step s12 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)
>  as step s13 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/CreateDataflowView 
> as step s14 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Create.Values/Read(CreateSource)
>  as step s15 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
>  as step s16 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
>  as step s17 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize 
> as step s18 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair 
> with random key as step s19 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
>  as step s20 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey
>  as step s21 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
>  as step s22 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator 
> addStep INFO: Adding 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Values/Values/Map
>  as step s23 Nov 20, 2018 11:09:21 AM 
> org.apache.beam.runners.dataflow.DataflowRunner run INFO: Staging pipeline 
> description to gs://dataflow-staging-us-central1-927334603519/temp/staging/ 
> Nov 20, 2018 11:09:22 AM org.apache.beam.runners.dataflow.util.PackageUtil 
> tryStagePackage INFO: Uploading <63064 bytes, hash 1eMyLHEGQDSnbL_rtt2mOQ> to 
> gs://dataflow-staging-us-central1-927334603519/temp/staging/pipeline-1eMyLHEGQDSnbL_rtt2mOQ.pb
>  Dataflow SDK version: 2.9.0-SNAPSHOT Nov 20, 2018 11:09:24 AM 
> org.apache.beam.runners.dataflow.DataflowRunner run Submitted job: 
> 2018-11-20_11_09_23-3437480453113408186 INFO: To access the Dataflow 
> monitoring console, please navigate to 
> https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-11-20_11_09_23-3437480453113408186?project=google.com%3Aclouddfe
>  Nov 20, 2018 11:09:24 AM org.apache.beam.runners.dataflow.DataflowRunner run 
> INFO: To cancel the job using the 'gcloud' tool, run: > gcloud dataflow jobs 
> --project=google.com:clouddfe cancel --region=us-central1 
> 2018-11-20_11_09_23-3437480453113408186 Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:23.285Z: Autoscaling is enabled for job 
> 2018-11-20_11_09_23-3437480453113408186. The number of workers will be 
> between 1 and 1000. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:23.346Z: Autoscaling was automatically enabled for job 
> 2018-11-20_11_09_23-3437480453113408186. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:26.020Z: Checking permissions granted to controller 
> Service Account. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:30.430Z: Worker configuration: n1-standard-1 in 
> us-central1-a. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.335Z: Expanding CoGroupByKey operations into 
> optimizable parts. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.555Z: Expanding GroupByKey operations into 
> optimizable parts. Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.614Z: Lifting ValueCombiningMappingFns into 
> MergeBucketsMappingFns Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.886Z: Fusing adjacent ParDo, Read, Write, and 
> Flatten operations Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.941Z: Fusing consumer 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
>  into 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Create.Values/Read(CreateSource)
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:31.994Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair 
> with random key Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.044Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/GroupByWindow
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.099Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Values/Values/Map
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.152Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Reify
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.196Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Write
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Reify
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.244Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/GroupByWindow
>  into 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Read
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.312Z: Fusing consumer 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
>  into 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.364Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize into 
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
>  Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.410Z: Fusing consumer 
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair 
> with random key into WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize 
> Nov 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.459Z: Unzipping flatten s12 for input 
> s11.org.apache.beam.sdk.values.PCollection.<init>:402#20ff67585e33a8f6 Nov 
> 20, 2018 11:09:34 AM 
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process 
> INFO: 2018-11-20T19:09:32.491Z: Fusing unzipped copy of WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow), through flatten WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/Flatten.PCollections, into producer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.536Z: Fusing consumer WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow) into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.575Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.627Z: Fusing consumer WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues into WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Read
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.686Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.726Z: Fusing consumer WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign into MapElements/Map
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.781Z: Fusing consumer WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Extract into WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.838Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/GroupByWindow
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.884Z: Fusing consumer WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial into WordCount.CountWords/Count.PerElement/Init/Map
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.928Z: Fusing consumer WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write into WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.975Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles into WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.029Z: Fusing consumer WordCount.CountWords/Count.PerElement/Init/Map into WordCount.CountWords/ParDo(ExtractWords)
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.074Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/GroupByWindow into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Read
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.112Z: Fusing consumer MapElements/Map into WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Extract
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.152Z: Fusing consumer WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify into WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.190Z: Fusing consumer WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify into WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.245Z: Fusing consumer WordCount.CountWords/ParDo(ExtractWords) into ReadLines/Read
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.764Z: Executing operation WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Create
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.810Z: Executing operation WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Create
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.850Z: Executing operation WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Create
> Nov 20, 2018 11:09:34 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.889Z: Starting 1 workers in us-central1-a...
> Nov 20, 2018 11:09:37 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:34.261Z: Executing operation ReadLines/Read+WordCount.CountWords/ParDo(ExtractWords)+WordCount.CountWords/Count.PerElement/Init/Map+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write
> Nov 20, 2018 11:09:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:43.287Z: Autoscaling: Raised the number of workers to 0 based on the rate of progress in the currently running step(s).
> Nov 20, 2018 11:10:16 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:15.798Z: Autoscaling: Raised the number of workers to 1 based on the rate of progress in the currently running step(s).
> Nov 20, 2018 11:10:16 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:15.859Z: Autoscaling: Would further reduce the number of workers but reached the minimum number allowed for the job.
> Nov 20, 2018 11:10:22 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:21.327Z: Workers have started successfully.
> Nov 20, 2018 11:10:22 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:21.511Z: Workers have started successfully.
> Nov 20, 2018 11:10:35 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> SEVERE: 2018-11-20T19:10:32.138Z: java.lang.RuntimeException: org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:193)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:164)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:63)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:50)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks.replaceDirectedNetworkNodes(Networks.java:87)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.create(IntrinsicMapTaskExecutorFactory.java:124)
>     at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:337)
>     at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:291)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:135)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:115)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:102)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2214)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache.get(LocalCache.java:4053)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4899)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory.create(UserParDoFnFactory.java:91)
>     at org.apache.beam.runners.dataflow.worker.DefaultParDoFnFactory.create(DefaultParDoFnFactory.java:75)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.createParDoOperation(IntrinsicMapTaskExecutorFactory.java:263)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.access$000(IntrinsicMapTaskExecutorFactory.java:85)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:182)
>     ... 14 more
> Caused by: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.sdk.util.SerializableUtils.deserializeFromByteArray(SerializableUtils.java:74)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory$UserDoFnExtractor.getDoFnInfo(UserParDoFnFactory.java:62)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory.lambda$create$0(UserParDoFnFactory.java:93)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4904)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3628)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2336)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2295)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2208)
>     ... 21 more
> Caused by: java.io.InvalidClassException: org.apache.beam.sdk.values.WindowingStrategy; local class incompatible: stream classdesc serialVersionUID = -6607512772692666907, local class serialVersionUID = -3616600070988263902
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1630)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>     at org.apache.beam.sdk.util.SerializableUtils.deserializeFromByteArray(SerializableUtils.java:71)
>     ... 28 more
> [The identical SEVERE stack trace is logged three more times, at 2018-11-20T19:10:36.934Z, 2018-11-20T19:10:44.927Z, and 2018-11-20T19:11:00.729Z, as the work item is retried; the repeats are elided here.]
> Nov 20, 2018 11:11:01 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> SEVERE: 2018-11-20T19:11:00.885Z: Workflow failed. Causes: S02:ReadLines/Read+WordCount.CountWords/ParDo(ExtractWords)+WordCount.CountWords/Count.PerElement/Init/Map+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on: wordcount-goenka-11201909-11201109-n7q9-harness-6q3n, wordcount-goenka-11201909-11201109-n7q9-harness-6q3n, wordcount-goenka-11201909-11201109-n7q9-harness-6q3n, wordcount-goenka-11201909-11201109-n7q9-harness-6q3n
> Nov 20, 2018 11:11:02 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:11:01.059Z: Cleaning up.
> Nov 20, 2018 11:11:02 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:11:01.473Z: Stopping worker pool...
> {code}
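The root cause in the trace above is the final java.io.InvalidClassException: the DoFnInfo payload serialized at job submission contains a WindowingStrategy whose serialVersionUID does not match that of the org.apache.beam.sdk.values.WindowingStrategy class the worker loads, so Java deserialization rejects it. This is the symptom of the worker carrying its own, different copy of an SDK class. The snippet below is a minimal, self-contained sketch of the mechanism, not Beam code; the two Strategy* classes are hypothetical stand-ins for two drifted builds of one class. Because neither declares an explicit serialVersionUID, the JVM derives one from each class's structure, and any structural difference produces a different UID, which is exactly the "local class incompatible" mismatch reported in the log.

{code:java}
import java.io.ObjectStreamClass;
import java.io.Serializable;

public class SerialVersionUidDemo {

  // Stand-in for the copy of the class that serialized the pipeline payload.
  static class StrategyAsSerialized implements Serializable {
    String windowFn;
  }

  // Stand-in for the worker's local copy, built from drifted sources;
  // one extra field is enough to change the computed serialVersionUID.
  static class StrategyOnWorker implements Serializable {
    String windowFn;
    long allowedLatenessMillis;
  }

  public static void main(String[] args) {
    // ObjectStreamClass computes the default serialVersionUID the same way
    // ObjectInputStream does when comparing the stream's class descriptor
    // against the local class during deserialization.
    long streamUid = ObjectStreamClass.lookup(StrategyAsSerialized.class).getSerialVersionUID();
    long localUid = ObjectStreamClass.lookup(StrategyOnWorker.class).getSerialVersionUID();

    // The two UIDs differ, so an object written with the first class and
    // read against the second fails with java.io.InvalidClassException.
    System.out.println("stream classdesc serialVersionUID = " + streamUid);
    System.out.println("local class serialVersionUID      = " + localUid);
  }
}
{code}

Note that declaring an explicit serialVersionUID would only mask such drift; the real fix is to ensure the worker classpath carries a single (properly relocated) copy of each SDK class so both sides agree on the class being deserialized.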



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
