[ https://issues.apache.org/jira/browse/BEAM-6102?focusedWorklogId=169512&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-169512 ]
ASF GitHub Bot logged work on BEAM-6102:
----------------------------------------
Author: ASF GitHub Bot
Created on: 26/Nov/18 22:08
Start Date: 26/Nov/18 22:08
Worklog Time Spent: 10m
Work Description: lukecwik closed pull request #7116: [BEAM-6102] Clean-up worker shading rules.
URL: https://github.com/apache/beam/pull/7116
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
diff --git a/runners/google-cloud-dataflow-java/worker/build.gradle b/runners/google-cloud-dataflow-java/worker/build.gradle
index c8e2795b50fd..70b587a0909e 100644
--- a/runners/google-cloud-dataflow-java/worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/build.gradle
@@ -26,39 +26,9 @@ def DATAFLOW_VERSION = "dataflow.version"
// Get full dependency of 'com.google.apis:google-api-services-dataflow'
def google_api_services_dataflow = project.hasProperty(DATAFLOW_VERSION) ?
"com.google.apis:google-api-services-dataflow:" + getProperty(DATAFLOW_VERSION)
: library.java.google_api_services_dataflow
-// Returns a string representing the relocated path to be used with the shadow
-// plugin when given a suffix such as "com.".
-def getWorkerRelocatedPath = { String suffix ->
- return ("org.apache.beam.runners.dataflow.worker.repackaged."
- + suffix)
-}
-
-// Following listed dependencies will be shaded only in fnapi worker, not legacy
-// worker
-def sdk_provided_dependencies = [
- google_api_services_dataflow,
- library.java.avro,
- library.java.google_api_client,
- library.java.google_http_client,
- library.java.google_http_client_jackson,
- library.java.jackson_annotations,
- library.java.jackson_core,
- library.java.jackson_databind,
- library.java.joda_time,
-]
-
-def sdk_provided_project_dependencies = [
- ":beam-runners-google-cloud-dataflow-java",
- ":beam-sdks-java-core",
- ":beam-sdks-java-extensions-google-cloud-platform-core",
- ":beam-sdks-java-io-google-cloud-platform",
-]
-
-applyJavaNature(publish: false, enableFindbugs: false /* TODO(BEAM-5658): enable findbugs */, validateShadowJar: false, shadowClosure: DEFAULT_SHADOW_CLOSURE << {
- dependencies {
- include(project(path: ":beam-runners-google-cloud-dataflow-java-windmill", configuration: "shadow"))
- include(dependency(".*:.*"))
- }
+applyJavaNature(publish: false, enableFindbugs: false /* TODO(BEAM-5658): enable findbugs */, validateShadowJar: false, shadowClosure: {
+ // In the case of the Fn API worker jar we are creating an application so we rely on
+ // the shadow plugin to include all transitive dependencies to create an uber jar.
// Include original source files extracted under
// '$buildDir/original_sources_to_package' to jar
@@ -81,34 +51,33 @@ configurations {
}
}
-def common_compile_project_dependencies = [
- ":beam-model-fn-execution",
- ":beam-model-pipeline",
- ":beam-runners-core-construction-java",
- ":beam-runners-core-java",
- ":beam-runners-java-fn-execution",
- ":beam-sdks-java-fn-execution",
- ":beam-runners-google-cloud-dataflow-java-windmill",
-]
-
-def common_test_compile_project_dependencies = [
- ":beam-runners-core-java",
- ":beam-sdks-java-core",
- ":beam-sdks-java-extensions-google-cloud-platform-core",
-]
-
dependencies {
- sdk_provided_dependencies.each {
- shadow(it)
- }
- sdk_provided_project_dependencies.each {
- shadow project(path: it, configuration: "shadow")
- }
-
- common_compile_project_dependencies.each {
- compile project(path: it, configuration: "shadow")
- }
-
+ // Note that any dependency that is modified here should also be modified within
+ // runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle using the rules provided
+ // there.
+ //
+ // All main sourceset dependencies here should be listed as compile scope so that the dependencies
+ // are all packaged into a single uber jar allowing the jar to serve as an application.
+ compile project(path: ":beam-runners-google-cloud-dataflow-java", configuration: "shadow")
+ compile project(path: ":beam-sdks-java-core", configuration: "shadow")
+ compile project(path: ":beam-sdks-java-extensions-google-cloud-platform-core", configuration: "shadow")
+ compile project(path: ":beam-sdks-java-io-google-cloud-platform", configuration: "shadow")
+ compile project(path: ":beam-model-fn-execution", configuration: "shadow")
+ compile project(path: ":beam-model-pipeline", configuration: "shadow")
+ compile project(path: ":beam-runners-core-construction-java", configuration: "shadow")
+ compile project(path: ":beam-runners-core-java", configuration: "shadow")
+ compile project(path: ":beam-runners-java-fn-execution", configuration: "shadow")
+ compile project(path: ":beam-sdks-java-fn-execution", configuration: "shadow")
+ compile project(path: ":beam-runners-google-cloud-dataflow-java-windmill", configuration: "shadow")
+ compile google_api_services_dataflow
+ compile library.java.avro
+ compile library.java.google_api_client
+ compile library.java.google_http_client
+ compile library.java.google_http_client_jackson
+ compile library.java.jackson_annotations
+ compile library.java.jackson_core
+ compile library.java.jackson_databind
+ compile library.java.joda_time
compile library.java.guava
compile library.java.slf4j_api
compile library.java.vendored_grpc_1_13_1
@@ -116,22 +85,20 @@ dependencies {
compile "org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"
compile "org.eclipse.jetty:jetty-server:9.2.10.v20150310"
compile "org.eclipse.jetty:jetty-servlet:9.2.10.v20150310"
-
- provided library.java.error_prone_annotations
-
- runtime library.java.slf4j_jdk14
-
- common_test_compile_project_dependencies.each {
- testCompile project(path: it, configuration: "shadowTest")
- }
-
- testCompile project(path: ":beam-runners-direct-java", configuration: "shadow")
-
- testCompile library.java.guava_testlib
- testCompile library.java.hamcrest_core
- testCompile library.java.hamcrest_library
- testCompile library.java.junit
- testCompile library.java.mockito_core
+ compile library.java.error_prone_annotations
+ compile library.java.slf4j_jdk14
+
+ // All test sourceset dependencies can be marked as shadowTest since we create an uber jar without
+ // relocating any code.
+ shadowTest project(path: ":beam-runners-core-java", configuration: "shadowTest")
+ shadowTest project(path: ":beam-sdks-java-core", configuration: "shadowTest")
+ shadowTest project(path: ":beam-sdks-java-extensions-google-cloud-platform-core", configuration: "shadowTest")
+ shadowTest project(path: ":beam-runners-direct-java", configuration: "shadow")
+ shadowTest library.java.guava_testlib
+ shadowTest library.java.hamcrest_core
+ shadowTest library.java.hamcrest_library
+ shadowTest library.java.junit
+ shadowTest library.java.mockito_core
}
//TODO(BEAM-5657): checkstyle task should be enabled in the future.
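
For orientation between the two files in this PR, the scoping convention spelled out in the comments above (every main sourceset dependency in compile scope so that shadowJar emits a self-contained application jar) can be reproduced in a minimal standalone build. This is only a sketch assuming the Shadow plugin and Gradle's legacy compile/shadow configurations; the coordinates are illustrative, not Beam's dependency list:
{code:groovy}
// build.gradle -- minimal sketch of the fn-api worker's uber-jar scoping.
plugins {
    id 'java'
    id 'com.github.johnrengelman.shadow' version '4.0.3'
}

repositories {
    mavenCentral()
}

dependencies {
    // compile scope: shadowJar merges these jars into the output, so the
    // resulting artifact can be launched directly as an application.
    compile 'joda-time:joda-time:2.10.1'
    // shadow scope: kept out of the uber jar and only declared as a runtime
    // dependency of consumers of the shaded artifact.
    shadow 'org.slf4j:slf4j-api:1.7.25'
}
{code}
Running gradle shadowJar on such a build bundles joda-time into the jar while slf4j-api stays external, which is the compile-versus-shadow split the hunks above rely on.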
diff --git a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
index 8055876124eb..d88bec1f96dd 100644
--- a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
@@ -52,9 +52,12 @@ def sdk_provided_dependencies = [
library.java.jackson_core,
library.java.jackson_databind,
library.java.joda_time,
+ library.java.slf4j_api,
+ library.java.vendored_grpc_1_13_1,
]
def sdk_provided_project_dependencies = [
+ ":beam-model-pipeline",
":beam-runners-google-cloud-dataflow-java",
":beam-sdks-java-core",
":beam-sdks-java-extensions-google-cloud-platform-core",
@@ -84,7 +87,7 @@ applyJavaNature(publish: false, enableFindbugs: false /* TODO(BEAM-5658): enable
exclude(dependency(it))
}
sdk_provided_project_dependencies.each {
- exclude(project(path: it))
+ exclude(project(path: it, configuration: "shadow"))
}
excluded_dependencies.each {
exclude(dependency(it))
@@ -148,22 +151,6 @@ configurations {
}
}
-def common_compile_project_dependencies = [
- ":beam-model-fn-execution",
- ":beam-model-pipeline",
- ":beam-runners-core-construction-java",
- ":beam-runners-core-java",
- ":beam-runners-java-fn-execution",
- ":beam-sdks-java-fn-execution",
- ":beam-runners-google-cloud-dataflow-java-windmill",
-]
-
-def common_test_compile_project_dependencies = [
- ":beam-runners-core-java",
- ":beam-sdks-java-core",
- ":beam-sdks-java-extensions-google-cloud-platform-core",
-]
-
evaluationDependsOn(":beam-runners-google-cloud-dataflow-java-fn-api-worker")
compileJava {
@@ -175,6 +162,9 @@ compileTestJava {
}
dependencies {
+ // Note that any dependency that is modified here should also be modified within
+ // runners/google-cloud-dataflow-java/worker/build.gradle using the rules provided
+ // there.
sdk_provided_dependencies.each {
provided(it)
}
@@ -182,33 +172,32 @@ dependencies {
provided project(path: it, configuration: "shadow")
}
- common_compile_project_dependencies.each {
- compile project(path: it, configuration: "shadow")
- }
-
+ compile project(path: ":beam-model-fn-execution", configuration: "shadow")
+ compile project(path: ":beam-runners-core-construction-java",
configuration: "shadow")
+ compile project(path: ":beam-runners-core-java", configuration: "shadow")
+ compile project(path: ":beam-runners-java-fn-execution", configuration:
"shadow")
+ compile project(path: ":beam-sdks-java-fn-execution", configuration:
"shadow")
+ compile project(path: ":beam-runners-google-cloud-dataflow-java-windmill",
configuration: "shadow")
compile library.java.guava
- compile library.java.slf4j_api
- compile library.java.vendored_grpc_1_13_1
compile "javax.servlet:javax.servlet-api:3.1.0"
compile "org.conscrypt:conscrypt-openjdk:1.1.3:linux-x86_64"
compile "org.eclipse.jetty:jetty-server:9.2.10.v20150310"
compile "org.eclipse.jetty:jetty-servlet:9.2.10.v20150310"
-
provided library.java.error_prone_annotations
-
runtime library.java.slf4j_jdk14
- common_test_compile_project_dependencies.each {
- testCompile project(path: it, configuration: "shadowTest")
- }
-
- testCompile project(path: ":beam-runners-direct-java", configuration: "shadow")
-
- testCompile library.java.guava_testlib
- testCompile library.java.hamcrest_core
- testCompile library.java.hamcrest_library
- testCompile library.java.junit
- testCompile library.java.mockito_core
+ // Any test dependency which intersects with our relocation rules above needs to be relocated
+ // as well and placed within the testCompile configuration. Otherwise we can place it within
+ // the shadowTest configuration.
+ testCompile project(path: ":beam-runners-core-java", configuration: "shadowTest")
+ shadowTest library.java.guava_testlib
+ shadowTest project(path: ":beam-sdks-java-extensions-google-cloud-platform-core", configuration: "shadowTest")
+ shadowTest project(path: ":beam-runners-direct-java", configuration: "shadow")
+ shadowTest project(path: ":beam-sdks-java-core", configuration: "shadowTest")
+ shadowTest library.java.hamcrest_core
+ shadowTest library.java.hamcrest_library
+ shadowTest library.java.junit
+ shadowTest library.java.mockito_core
}
//TODO(BEAM-5657): checkstyle task should be enabled in the future.
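
The testCompile/shadowTest split in the last hunk follows from the relocation rules earlier in this file: classes rewritten by a relocate rule only exist under repackaged names inside the legacy worker jar, so tests that touch them must compile against the shaded output (testCompile), while everything else can ride along unrelocated (shadowTest). A sketch of the underlying Shadow mechanics, with example package and module names rather than the file's real rule set:
{code:groovy}
// Sketch of the relocate/exclude pattern applied by the legacy worker.
shadowJar {
    // Bundled classes are rewritten into a repackaged namespace so they
    // cannot collide with user code on the Dataflow worker's classpath.
    relocate 'com.google.common',
             'org.apache.beam.runners.dataflow.worker.repackaged.com.google.common'
    dependencies {
        // SDK-provided modules stay out of the shaded jar; the worker
        // expects to find them on its own classpath at runtime.
        exclude(dependency('joda-time:joda-time:.*'))
        exclude(project(path: ':example-provided-module', configuration: 'shadow'))
    }
}
{code}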
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 169512)
Time Spent: 1h (was: 50m)
> Dataflow cannot deserialize DoFns - incompatible serialVersionUID (JDK or
> code version mismatch)
> ------------------------------------------------------------------------------------------------
>
> Key: BEAM-6102
> URL: https://issues.apache.org/jira/browse/BEAM-6102
> Project: Beam
> Issue Type: Bug
> Components: runner-dataflow
> Reporter: Ankur Goenka
> Assignee: Luke Cwik
> Priority: Blocker
> Fix For: 2.9.0
>
> Attachments: WindowingStrategy.class
>
> Time Spent: 1h
> Remaining Estimate: 0h
>
> The wordcount example is broken on master.
> It's failing with the serialization error mentioned below.
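>
> For context, this is the standard Java serialization failure mode: the DoFn bytes were written by one build of org.apache.beam.sdk.values.WindowingStrategy and read back by a build whose derived serialVersionUID differs, so ObjectInputStream throws InvalidClassException. A minimal illustration with a hypothetical class (not Beam code):
> {code:java}
> import java.io.*;
>
> // No explicit serialVersionUID, so the JVM derives one from the class
> // shape; any structural change between builds yields a different UID.
> class Payload implements Serializable {
>     int value;
> }
>
> public class UidMismatchDemo {
>     public static void main(String[] args) throws Exception {
>         ByteArrayOutputStream bytes = new ByteArrayOutputStream();
>         try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
>             out.writeObject(new Payload());
>         }
>         // Reading these bytes with a *different* build of Payload (say, one
>         // compiled with an extra field) fails with InvalidClassException:
>         // "local class incompatible: stream classdesc serialVersionUID = ...,
>         //  local class serialVersionUID = ...", exactly as in the log below.
>         // Pinning `private static final long serialVersionUID = 1L;` in the
>         // class makes the two builds compatible again.
>         try (ObjectInputStream in = new ObjectInputStream(
>                 new ByteArrayInputStream(bytes.toByteArray()))) {
>             System.out.println(in.readObject());
>         }
>     }
> }
> {code}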
>
> {code:java}
> 11:08:59 AM: Executing task 'WordCount.main()'... Parallel execution is an
> incubating feature. > Task :buildSrc:compileJava NO-SOURCE > Task
> :buildSrc:compileGroovy UP-TO-DATE > Task :buildSrc:processResources
> NO-SOURCE > Task :buildSrc:classes UP-TO-DATE > Task :buildSrc:jar UP-TO-DATE
> > Task :buildSrc:assemble UP-TO-DATE > Task :buildSrc:spotlessGroovy
> UP-TO-DATE > Task :buildSrc:spotlessGroovyCheck UP-TO-DATE > Task
> :buildSrc:spotlessGroovyGradle UP-TO-DATE > Task
> :buildSrc:spotlessGroovyGradleCheck UP-TO-DATE > Task :buildSrc:spotlessCheck
> UP-TO-DATE > Task :buildSrc:compileTestJava NO-SOURCE > Task
> :buildSrc:compileTestGroovy NO-SOURCE > Task :buildSrc:processTestResources
> NO-SOURCE > Task :buildSrc:testClasses UP-TO-DATE > Task :buildSrc:test
> NO-SOURCE > Task :buildSrc:check UP-TO-DATE > Task :buildSrc:build UP-TO-DATE
> Parallel execution with configuration on demand is an incubating feature. >
> Configure project :beam-model-pipeline applyPortabilityNature with default
> configuration for project beam-model-pipeline > Configure project
> :beam-model-job-management applyPortabilityNature with default configuration
> for project beam-model-job-management > Configure project
> :beam-model-fn-execution applyPortabilityNature with default configuration
> for project beam-model-fn-execution > Task
> :beam-examples-java:processResources NO-SOURCE > Task
> :beam-sdks-java-core:generateAvroProtocol NO-SOURCE > Task
> :beam-sdks-java-extensions-google-cloud-platform-core:processResources
> NO-SOURCE > Task :beam-sdks-java-io-google-cloud-platform:processResources
> NO-SOURCE > Task :beam-vendor-grpc-v1_13_1:compileJava NO-SOURCE > Task
> :beam-runners-core-construction-java:processResources NO-SOURCE > Task
> :beam-sdks-java-extensions-protobuf:extractProto UP-TO-DATE > Task
> :beam-model-pipeline:extractProto UP-TO-DATE > Task
> :beam-model-job-management:processResources UP-TO-DATE > Task
> :beam-runners-core-java:processResources NO-SOURCE > Task
> :beam-sdks-java-fn-execution:processResources NO-SOURCE > Task
> :beam-sdks-java-harness:processResources NO-SOURCE > Task
> :beam-vendor-sdks-java-extensions-protobuf:processResources NO-SOURCE > Task
> :beam-sdks-java-extensions-protobuf:processResources NO-SOURCE > Task
> :beam-sdks-java-core:generateAvroJava NO-SOURCE > Task
> :beam-vendor-grpc-v1_13_1:processResources NO-SOURCE > Task
> :beam-model-fn-execution:processResources UP-TO-DATE > Task
> :beam-model-pipeline:processResources UP-TO-DATE > Task
> :beam-runners-local-java-core:processResources NO-SOURCE > Task
> :beam-runners-java-fn-execution:processResources NO-SOURCE > Task
> :beam-runners-direct-java:processResources NO-SOURCE > Task
> :beam-vendor-grpc-v1_13_1:classes UP-TO-DATE > Task
> :beam-sdks-java-core:processResources UP-TO-DATE > Task
> :beam-runners-google-cloud-dataflow-java:processResources UP-TO-DATE > Task
> :beam-vendor-grpc-v1_13_1:shadowJar UP-TO-DATE > Task
> :beam-model-fn-execution:extractIncludeProto UP-TO-DATE > Task
> :beam-model-pipeline:extractIncludeProto UP-TO-DATE > Task
> :beam-model-job-management:extractIncludeProto UP-TO-DATE > Task
> :beam-model-pipeline:generateProto UP-TO-DATE > Task
> :beam-model-pipeline:compileJava UP-TO-DATE > Task
> :beam-model-pipeline:classes UP-TO-DATE > Task :beam-model-pipeline:shadowJar
> UP-TO-DATE > Task :beam-model-pipeline:jar UP-TO-DATE > Task
> :beam-model-job-management:extractProto UP-TO-DATE > Task
> :beam-model-fn-execution:extractProto UP-TO-DATE > Task
> :beam-model-fn-execution:generateProto UP-TO-DATE > Task
> :beam-model-job-management:generateProto UP-TO-DATE > Task
> :beam-model-job-management:compileJava UP-TO-DATE > Task
> :beam-model-job-management:classes UP-TO-DATE > Task
> :beam-model-fn-execution:compileJava UP-TO-DATE > Task
> :beam-model-fn-execution:classes UP-TO-DATE > Task
> :beam-model-job-management:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-core:compileJava UP-TO-DATE > Task
> :beam-sdks-java-core:classes UP-TO-DATE > Task
> :beam-model-fn-execution:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-core:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-extensions-protobuf:extractIncludeProto UP-TO-DATE > Task
> :beam-sdks-java-extensions-protobuf:generateProto NO-SOURCE > Task
> :beam-sdks-java-fn-execution:compileJava UP-TO-DATE > Task
> :beam-sdks-java-fn-execution:classes UP-TO-DATE > Task
> :beam-runners-local-java-core:compileJava UP-TO-DATE > Task
> :beam-runners-local-java-core:classes UP-TO-DATE > Task
> :beam-runners-core-construction-java:compileJava UP-TO-DATE > Task
> :beam-sdks-java-extensions-google-cloud-platform-core:compileJava UP-TO-DATE
> > Task :beam-sdks-java-extensions-google-cloud-platform-core:classes
> UP-TO-DATE > Task :beam-sdks-java-extensions-protobuf:compileJava UP-TO-DATE
> > Task :beam-sdks-java-extensions-protobuf:classes UP-TO-DATE > Task
> :beam-vendor-sdks-java-extensions-protobuf:compileJava UP-TO-DATE > Task
> :beam-vendor-sdks-java-extensions-protobuf:classes UP-TO-DATE > Task
> :beam-runners-core-construction-java:classes UP-TO-DATE > Task
> :beam-runners-local-java-core:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-fn-execution:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-extensions-protobuf:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-extensions-google-cloud-platform-core:shadowJar UP-TO-DATE >
> Task :beam-vendor-sdks-java-extensions-protobuf:shadowJar UP-TO-DATE > Task
> :beam-runners-core-construction-java:shadowJar UP-TO-DATE > Task
> :beam-runners-core-java:compileJava UP-TO-DATE > Task
> :beam-runners-core-java:classes UP-TO-DATE > Task
> :beam-runners-core-java:shadowJar UP-TO-DATE > Task
> :beam-sdks-java-io-google-cloud-platform:compileJava UP-TO-DATE > Task
> :beam-sdks-java-io-google-cloud-platform:classes UP-TO-DATE > Task
> :beam-sdks-java-harness:compileJava UP-TO-DATE > Task
> :beam-sdks-java-harness:classes UP-TO-DATE > Task :beam-sdks-java-harness:jar
> UP-TO-DATE > Task :beam-sdks-java-io-google-cloud-platform:shadowJar
> UP-TO-DATE > Task :beam-sdks-java-harness:shadowJar UP-TO-DATE > Task
> :beam-examples-java:compileJava UP-TO-DATE > Task :beam-examples-java:classes
> UP-TO-DATE > Task :beam-runners-java-fn-execution:compileJava UP-TO-DATE >
> Task :beam-runners-java-fn-execution:classes UP-TO-DATE > Task
> :beam-runners-java-fn-execution:shadowJar UP-TO-DATE > Task
> :beam-runners-google-cloud-dataflow-java:compileJava UP-TO-DATE > Task
> :beam-runners-google-cloud-dataflow-java:classes UP-TO-DATE > Task
> :beam-runners-direct-java:compileJava UP-TO-DATE > Task
> :beam-runners-direct-java:classes UP-TO-DATE > Task
> :beam-runners-google-cloud-dataflow-java:shadowJar UP-TO-DATE > Task
> :beam-runners-direct-java:shadowJar UP-TO-DATE > Task
> :beam-examples-java:WordCount.main() Nov 20, 2018 11:09:02 AM
> org.apache.beam.sdk.extensions.gcp.options.GcpOptions$DefaultProjectFactory
> create INFO: Inferred default GCP project 'google.com:clouddfe' from gcloud.
> If this is the incorrect project, please cancel this Pipeline and specify the
> command-line argument --project. Nov 20, 2018 11:09:02 AM
> com.google.auth.oauth2.DefaultCredentialsProvider
> warnAboutProblematicCredentials WARNING: Your application has authenticated
> using end user credentials from Google Cloud SDK. We recommend that most
> server applications use service accounts instead. If your application
> continues to use end user credentials from Cloud SDK, you might receive a
> "quota exceeded" or "API not enabled" error. For more information about
> service accounts, see https://cloud.google.com/docs/authentication/. Nov 20,
> 2018 11:09:03 AM
> org.apache.beam.sdk.extensions.gcp.options.GcpOptions$GcpTempLocationFactory
> tryCreateDefaultBucket INFO: No tempLocation specified, attempting to use
> default bucket: dataflow-staging-us-central1-927334603519 Nov 20, 2018
> 11:09:03 AM
> org.apache.beam.sdk.util.RetryHttpRequestInitializer$LoggingHttpBackOffHandler
> handleResponse WARNING: Request failed with code 409, performed 0 retries
> due to IOExceptions, performed 0 retries due to unsuccessful status codes,
> HTTP framework says request can be retried, (caller responsible for
> retrying):
> https://www.googleapis.com/storage/v1/b?predefinedAcl=projectPrivate&predefinedDefaultObjectAcl=projectPrivate&project=google.com:clouddfe
> Nov 20, 2018 11:09:03 AM
> org.apache.beam.runners.dataflow.options.DataflowPipelineOptions$StagingLocationFactory
> create INFO: No stagingLocation provided, falling back to gcpTempLocation
> Nov 20, 2018 11:09:04 AM org.apache.beam.runners.dataflow.DataflowRunner
> fromOptions INFO: PipelineOptions.filesToStage was not specified. Defaulting
> to files from the classpath: will stage 120 files. Enable logging at DEBUG
> level to see which files will be staged. Nov 20, 2018 11:09:04 AM
> org.apache.beam.runners.dataflow.DataflowRunner run INFO: Executing pipeline
> on the Dataflow Service, which will have billing implications related to
> Google Compute Engine usage and other Google Cloud Services. Nov 20, 2018
> 11:09:04 AM org.apache.beam.runners.dataflow.util.PackageUtil
> stageClasspathElements INFO: Uploading 120 files from
> PipelineOptions.filesToStage to staging location to prepare for execution.
> Nov 20, 2018 11:09:04 AM org.apache.beam.runners.dataflow.util.PackageUtil
> stageClasspathElements WARNING: Skipping non-existent file to stage
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/examples/java/build/resources/main.
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client/1.27.0/52d6076f58edfedb55526c0cac4ba155c6dc645b/google-http-client-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-1.27.0-XiDeJfgpNiiCZxI1a4Q27Q.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.oauth-client/google-oauth-client-java6/1.27.0/90570eedf1ae6188ee5028c11ec423fe52336373/google-oauth-client-java6-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-oauth-client-java6-1.27.0-VWEWXu2J1auJGvb5eLfbhg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-pubsub/v1-rev20181105-1.27.0/31f49e03ec44c72845a6a0572ccba74d7aee1f57/google-api-services-pubsub-v1-rev20181105-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-pubsub-v1-rev20181105-1.27.0-_3JmrTY281LX4AcOiP7LkQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-dataflow/v1b3-rev20181107-1.27.0/d0b3764765916d65fd75a52ae1a8cf13a150450a/google-api-services-dataflow-v1b3-rev20181107-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-dataflow-v1b3-rev20181107-1.27.0-ZCpCwXOULzPs_S2gBBH7TA.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.oauth-client/google-oauth-client/1.27.0/1979e3b499f1ef49959005ca59101791361abca9/google-oauth-client-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-oauth-client-1.27.0-waNrxu589XHIvY73DhrxNA.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-clouddebugger/v2-rev20180801-1.27.0/2866c6cfca03ef7bbf687ba3b62fa843377e664b/google-api-services-clouddebugger-v2-rev20180801-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-clouddebugger-v2-rev20180801-1.27.0-xrIcl7yUGCktYEb9_xtvjQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client-jackson2/1.27.0/1e1a91b684fc2e6e97d2369c7d8d1d9efc115543/google-api-client-jackson2-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-jackson2-1.27.0-M222JGX1Wh3HoNtcvwCaEw.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client-java6/1.27.0/2c06247935819b429424797d9844aa33955f4fb0/google-api-client-java6-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-java6-1.27.0-zPlGOao_rbTkn6baqjfy2w.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/io/google-cloud-platform/build/libs/beam-sdks-java-io-google-cloud-platform-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-io-google-cloud-platform-2.9.0-SNAPSHOT-xFFgKvdT2GS2td3iSJaJqQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-bigquery/v2-rev20181104-1.27.0/d539fb4b7ac318cadc344579fa6f80ae6cb8070e/google-api-services-bigquery-v2-rev20181104-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-bigquery-v2-rev20181104-1.27.0-oUKA0dpY-zmhgM8DIAq8Zg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-protobuf/1.27.0/b81859b8a284ea416d74644a70524c4d6be3e9a/google-http-client-protobuf-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-protobuf-1.27.0-1E0HsNaSwDtqOduUs-NssA.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-jackson/1.27.0/2b5eff5b6a343e716db21673743576db9a53e5ae/google-http-client-jackson-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-jackson-1.27.0-XQrBFTlCGWMqVu9vUPkMcw.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.api-client/google-api-client/1.27.0/7d498c11db92fb72521784212c953612a42e50db/google-api-client-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-client-1.27.0-UjH-dlvVLqF5D_4Trh8foQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-storage/v1-rev20181013-1.27.0/10593f99e06d962017b0663348ee1fed0f8528/google-api-services-storage-v1-rev20181013-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-storage-v1-rev20181013-1.27.0-Y65RPabM4FA03rQX5lUvKg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/model/job-management/build/libs/beam-model-job-management-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-model-job-management-2.9.0-SNAPSHOT-PRRkG0x941fKfrBa33f-Qg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.apis/google-api-services-cloudresourcemanager/v1-rev20181015-1.27.0/354796285db3b2fce9089147db3fe1f2a8cca444/google-api-services-cloudresourcemanager-v1-rev20181015-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-api-services-cloudresourcemanager-v1-rev20181015-1.27.0-SQfpsiUEwOlTbzfnWC4EzA.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/model/pipeline/build/libs/beam-model-pipeline-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-model-pipeline-2.9.0-SNAPSHOT-XD9dbaFWaiUJnjDcCAkrOg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/org.apache.httpcomponents/httpcore/4.4.9/a86ce739e5a7175b4b234c290a00a5fdb80957a0/httpcore-4.4.9.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/httpcore-4.4.9-uJRVUHg5wJ1hGWYd79IWag.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/extensions/protobuf/build/libs/beam-sdks-java-extensions-protobuf-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-extensions-protobuf-2.9.0-SNAPSHOT-4q_iSCAMQwCGDowjT6QSKw.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/extensions/google-cloud-platform-core/build/libs/beam-sdks-java-extensions-google-cloud-platform-core-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-extensions-google-cloud-platform-core-2.9.0-SNAPSHOT-qVkigYLlAbU1ujEdaTQZ0Q.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/org.apache.httpcomponents/httpclient/4.5.5/1603dfd56ebcd583ccdf337b6c3984ac55d89e58/httpclient-4.5.5.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/httpclient-4.5.5-l-flsTVHa30lpasx4epJIg.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/core-construction-java/build/libs/beam-runners-core-construction-java-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-core-construction-java-2.9.0-SNAPSHOT-XbHTR4DoxxMKF5aSfjqWYQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/.gradle/caches/modules-2/files-2.1/com.google.http-client/google-http-client-jackson2/1.27.0/25ee9240648b5dfd01a3385937bcf332b6f222a7/google-http-client-jackson2-1.27.0.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/google-http-client-jackson2-1.27.0-s26dP1SAvqbYDq_HiJ9Bbw.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/direct-java/build/libs/beam-runners-direct-java-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-direct-java-2.9.0-SNAPSHOT-jn9fS-cJcB_-dTRLAU-meQ.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/runners/google-cloud-dataflow-java/build/libs/beam-runners-google-cloud-dataflow-java-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-runners-google-cloud-dataflow-java-2.9.0-SNAPSHOT-oQdbiEtnM6C2iUC5axtRjA.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/examples/java/build/classes/java/main
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/main-cxzukVaCwjdrlMI-NQ8Jew.jar
> Nov 20, 2018 11:09:05 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/vendor/grpc-v1_13_1/build/libs/beam-vendor-grpc-v1_13_1-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-vendor-grpc-v1_13_1-2.9.0-SNAPSHOT-E8CXf40HkGDxZZU040A-bA.jar
> Nov 20, 2018 11:09:06 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading
> /usr/local/google/home/goenka/d/work/tmp/tmp_beam/beam/sdks/java/core/build/libs/beam-sdks-java-core-2.9.0-SNAPSHOT.jar
> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/beam-sdks-java-core-2.9.0-SNAPSHOT-Aoyvg0doZsYvr7jINSJAKA.jar
> Nov 20, 2018 11:09:21 AM org.apache.beam.runners.dataflow.util.PackageUtil
> stageClasspathElements INFO: Staging files complete: 91 files cached, 28
> files newly uploaded Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding ReadLines/Read as step s1 Nov 20, 2018 11:09:21 AM
> org.apache.beam.sdk.io.FileBasedSource getEstimatedSizeBytes INFO:
> Filepattern gs://apache-beam-samples/shakespeare/kinglear.txt matched 1 files
> with total size 157283 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding WordCount.CountWords/ParDo(ExtractWords) as step s2 Nov
> 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding WordCount.CountWords/Count.PerElement/Init/Map as step
> s3 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey as
> step s4 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
> as step s5 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding MapElements/Map as step s6 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign
> as step s7 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> as step s8 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten as
> step s9 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten as
> step s10 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum as step
> s11 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/Flatten.PCollections
> as step s12 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)
> as step s13 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/CreateDataflowView
> as step s14 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Create.Values/Read(CreateSource)
> as step s15 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
> as step s16 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
> as step s17 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize
> as step s18 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair
> with random key as step s19 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
> as step s20 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey
> as step s21 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
> as step s22 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowPipelineTranslator$Translator
> addStep INFO: Adding
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Values/Values/Map
> as step s23 Nov 20, 2018 11:09:21 AM
> org.apache.beam.runners.dataflow.DataflowRunner run INFO: Staging pipeline
> description to gs://dataflow-staging-us-central1-927334603519/temp/staging/
> Nov 20, 2018 11:09:22 AM org.apache.beam.runners.dataflow.util.PackageUtil
> tryStagePackage INFO: Uploading <63064 bytes, hash 1eMyLHEGQDSnbL_rtt2mOQ> to
> gs://dataflow-staging-us-central1-927334603519/temp/staging/pipeline-1eMyLHEGQDSnbL_rtt2mOQ.pb
> Dataflow SDK version: 2.9.0-SNAPSHOT Nov 20, 2018 11:09:24 AM
> org.apache.beam.runners.dataflow.DataflowRunner run Submitted job:
> 2018-11-20_11_09_23-3437480453113408186 INFO: To access the Dataflow
> monitoring console, please navigate to
> https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-11-20_11_09_23-3437480453113408186?project=google.com%3Aclouddfe
> Nov 20, 2018 11:09:24 AM org.apache.beam.runners.dataflow.DataflowRunner run
> INFO: To cancel the job using the 'gcloud' tool, run: > gcloud dataflow jobs
> --project=google.com:clouddfe cancel --region=us-central1
> 2018-11-20_11_09_23-3437480453113408186 Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:23.285Z: Autoscaling is enabled for job
> 2018-11-20_11_09_23-3437480453113408186. The number of workers will be
> between 1 and 1000. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:23.346Z: Autoscaling was automatically enabled for job
> 2018-11-20_11_09_23-3437480453113408186. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:26.020Z: Checking permissions granted to controller
> Service Account. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:30.430Z: Worker configuration: n1-standard-1 in
> us-central1-a. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.335Z: Expanding CoGroupByKey operations into
> optimizable parts. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.555Z: Expanding GroupByKey operations into
> optimizable parts. Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.614Z: Lifting ValueCombiningMappingFns into
> MergeBucketsMappingFns Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.886Z: Fusing adjacent ParDo, Read, Write, and
> Flatten operations Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.941Z: Fusing consumer
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
> into
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Create.Values/Read(CreateSource)
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:31.994Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair
> with random key Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.044Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/GroupByWindow
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.099Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Values/Values/Map
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/ExpandIterable
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.152Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Reify
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/Window.Into()/Window.Assign
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.196Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Write
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Reify
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.244Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/GroupByWindow
> into
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Read
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.312Z: Fusing consumer
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
> into
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Reify.ReifyView/ParDo(Anonymous)
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.364Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize into
> WriteCounts/WriteFiles/GatherTempFileResults/Reify.ReifyViewInGlobalWindow/Values/Values/Map
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.410Z: Fusing consumer
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Pair
> with random key into WriteCounts/WriteFiles/FinalizeTempFileBundles/Finalize
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.459Z: Unzipping flatten s12 for input
> s11.org.apache.beam.sdk.values.PCollection.<init>:402#20ff67585e33a8f6 Nov
> 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.491Z: Fusing unzipped copy of
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow),
> through flatten
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/Flatten.PCollections,
> into producer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum Nov 20,
> 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.536Z: Fusing consumer
> WriteCounts/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)
> into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.575Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/DropShardNum into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten Nov
> 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.627Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
> into
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Read
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.686Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write
> into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.726Z: Fusing consumer
> WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign into MapElements/Map
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.781Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Extract
> into
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.838Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnwritten into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/GroupByWindow
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.884Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial
> into WordCount.CountWords/Count.PerElement/Init/Map Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.928Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write
> into
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:32.975Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> into WriteCounts/WriteFiles/RewindowIntoGlobal/Window.Assign Nov 20, 2018
> 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.029Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Init/Map into
> WordCount.CountWords/ParDo(ExtractWords) Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.074Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/GroupByWindow
> into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Read
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.112Z: Fusing consumer MapElements/Map into
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Extract
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.152Z: Fusing consumer
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify
> into
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.190Z: Fusing consumer
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify
> into
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.245Z: Fusing consumer
> WordCount.CountWords/ParDo(ExtractWords) into ReadLines/Read Nov 20, 2018
> 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.764Z: Executing operation
> WriteCounts/WriteFiles/FinalizeTempFileBundles/Reshuffle.ViaRandomKey/Reshuffle/GroupByKey/Create
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.810Z: Executing operation
> WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Create
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.850Z: Executing operation
> WriteCounts/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Create
> Nov 20, 2018 11:09:34 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:33.889Z: Starting 1 workers in us-central1-a... Nov
> 20, 2018 11:09:37 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:34.261Z: Executing operation
> ReadLines/Read+WordCount.CountWords/ParDo(ExtractWords)+WordCount.CountWords/Count.PerElement/Init/Map+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write
> Nov 20, 2018 11:09:44 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:09:43.287Z: Autoscaling: Raised the number of workers to
> 0 based on the rate of progress in the currently running step(s). Nov 20,
> 2018 11:10:16 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:15.798Z: Autoscaling: Raised the number of workers to
> 1 based on the rate of progress in the currently running step(s). Nov 20,
> 2018 11:10:16 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:15.859Z: Autoscaling: Would further reduce the number
> of workers but reached the minimum number allowed for the job. Nov 20, 2018
> 11:10:22 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:21.327Z: Workers have started successfully. Nov 20,
> 2018 11:10:22 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:10:21.511Z: Workers have started successfully. Nov 20,
> 2018 11:10:35 AM
> org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> SEVERE: 2018-11-20T19:10:32.138Z: java.lang.RuntimeException: org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:193)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:164)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:63)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:50)
>     at org.apache.beam.runners.dataflow.worker.graph.Networks.replaceDirectedNetworkNodes(Networks.java:87)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.create(IntrinsicMapTaskExecutorFactory.java:124)
>     at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:337)
>     at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:291)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:135)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:115)
>     at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:102)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>     at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.util.concurrent.UncheckedExecutionException: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2214)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache.get(LocalCache.java:4053)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4899)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory.create(UserParDoFnFactory.java:91)
>     at org.apache.beam.runners.dataflow.worker.DefaultParDoFnFactory.create(DefaultParDoFnFactory.java:75)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.createParDoOperation(IntrinsicMapTaskExecutorFactory.java:263)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory.access$000(IntrinsicMapTaskExecutorFactory.java:85)
>     at org.apache.beam.runners.dataflow.worker.IntrinsicMapTaskExecutorFactory$1.typedApply(IntrinsicMapTaskExecutorFactory.java:182)
>     ... 14 more
> Caused by: java.lang.IllegalArgumentException: unable to deserialize Serialized DoFnInfo
>     at org.apache.beam.sdk.util.SerializableUtils.deserializeFromByteArray(SerializableUtils.java:74)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory$UserDoFnExtractor.getDoFnInfo(UserParDoFnFactory.java:62)
>     at org.apache.beam.runners.dataflow.worker.UserParDoFnFactory.lambda$create$0(UserParDoFnFactory.java:93)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LocalManualCache$1.load(LocalCache.java:4904)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3628)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2336)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2295)
>     at org.apache.beam.runners.dataflow.worker.repackaged.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2208)
>     ... 21 more
> Caused by: java.io.InvalidClassException: org.apache.beam.sdk.values.WindowingStrategy; local class incompatible: stream classdesc serialVersionUID = -6607512772692666907, local class serialVersionUID = -3616600070988263902
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:616)
>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1630)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1521)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1781)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2018)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1942)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1808)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1353)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:373)
>     at org.apache.beam.sdk.util.SerializableUtils.deserializeFromByteArray(SerializableUtils.java:71)
>     ... 28 more
> [identical stack trace logged again at 2018-11-20T19:10:36.934Z, 2018-11-20T19:10:44.927Z, and 2018-11-20T19:11:00.729Z as the work item was retried; repeats elided]
> Nov 20, 2018 11:11:01 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> SEVERE: 2018-11-20T19:11:00.885Z: Workflow failed. Causes:
> S02:ReadLines/Read+WordCount.CountWords/ParDo(ExtractWords)+WordCount.CountWords/Count.PerElement/Init/Map+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/Combine.GroupedValues/Partial+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Reify+WordCount.CountWords/Count.PerElement/Combine.perKey(Count)/GroupByKey/Write
> failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on:
>   wordcount-goenka-11201909-11201109-n7q9-harness-6q3n,
>   wordcount-goenka-11201909-11201109-n7q9-harness-6q3n,
>   wordcount-goenka-11201909-11201109-n7q9-harness-6q3n,
>   wordcount-goenka-11201909-11201109-n7q9-harness-6q3n
> Nov 20, 2018 11:11:02 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:11:01.059Z: Cleaning up.
> Nov 20, 2018 11:11:02 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
> INFO: 2018-11-20T19:11:01.473Z: Stopping worker pool...
> {code}
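
The actionable signal in the log above is the innermost cause: java.io.InvalidClassException for org.apache.beam.sdk.values.WindowingStrategy, "local class incompatible". The WindowingStrategy bytes staged by the submitting SDK carry serialVersionUID -6607512772692666907, while the copy of the class on the worker computes -3616600070988263902, so ObjectInputStream rejects the stream, the worker can never deserialize the DoFnInfo, and the job burns all four work-item attempts on the same error. A minimal, self-contained sketch of the mechanism (plain Java serialization, not Beam code; the Payload class and its UID value are hypothetical):

{code}
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

// Sketch of the serialVersionUID check behind the InvalidClassException above.
public class SerialVersionDemo {

  static class Payload implements Serializable {
    // Declaring the UID explicitly pins the wire format. Without it, the JVM
    // derives a UID from the class's name, fields, and method signatures, so
    // two differently built jars can disagree about "the same" class.
    private static final long serialVersionUID = 1L;
    final String value;
    Payload(String value) { this.value = value; }
  }

  public static void main(String[] args) throws Exception {
    // Writer side: roughly what the SDK does when serializing pipeline state.
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
      out.writeObject(new Payload("hello"));
    }

    // Reader side: roughly what the worker does. The round trip succeeds only
    // because the identical Payload class (same serialVersionUID) is loaded on
    // both sides. If the reader's class had a different UID, readObject would
    // throw java.io.InvalidClassException ("local class incompatible: stream
    // classdesc serialVersionUID = ...") -- exactly the failure in the log.
    try (ObjectInputStream in =
        new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
      Payload p = (Payload) in.readObject();
      System.out.println(p.value);
    }
  }
}
{code}

Because an undeclared serialVersionUID is derived from the class's structure, any build difference between the submitting SDK jar and the worker jar, for example a shaded/relocated type appearing in a field or method signature on one side only, can yield different UIDs for nominally the same class. That is consistent with the worker jar here having been built from mismatched sources of WindowingStrategy, which is the kind of skew the shading clean-up in this PR is meant to avoid.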