See
<https://builds.apache.org/job/beam_PostCommit_Python35/2289/display/redirect?page=changes>
Changes:
[github] [BEAM-9674] Don't specify selected fields when fetching BigQuery table
------------------------------------------
[...truncated 7.90 MB...]
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s4"
},
"serialized_fn":
"eNqVlNtyFDcQhpezEYSTyYEQCAEDCwljCJCDQyAsi8HYGB8WPDculWZGu6tCI01LGrt8MZXkIuVUnoEH4zq3PARpaQc7m+CtorZqd6ZX/XX33936bU8zZQVL+5wmnOWR0FEvLaJE9KDkZoN2heRUapZZssKEm9amtfhEJ5ZA48rvsKuC3c34UKPRcNw6mkrBlYM988FE3UbBaV8oZ2FvmgkpI+q/CU0NZ45TxXKeubKQnMC+4DLzrIM+j4PL/mARqihdIFk4EB9Giy7dtmks3u9NRvSEgoObQOJz+P7vgvy5kEOU8VQb5rSxcGgTDi/BR/Pzq3BkE44uwbEyWYXjw0oox41iMiqdkOS+6ZU5FrcgWcr7WmbcEDiBEoyzCk6uwsdxBwMXG66vFbUi4zRkfv3aihGOd3RL9Ba9oJPvHlrMpf1pVHfOiztZi9tGGYViTmjl7V5p+GQorWKNyZIT+lwVIn0pebaMwWZ8LAKfVvBZM96LiWTMMTj1Pset4208QuBzLOF0BV804yNeuDTl1tKCOV87nIlPo9F7T22XNIX1GJZIPrV2A87GB/DEmuDrtKvgy6Eu+6kJbSJwLiQlhXXw1R9wfgkuxMfRsC5UptdpzopCqJ4nTAyl7AxTtqtNbiMfP4S3hPakTpik//MmcLFM4n0ITjX2By4NwYLN1j9kJTjz7IUX5YEO7byMWjQruNKMx31yBsE8o0E2OiBeHUGcZtYtGJELJ9a4rZFfI/KbCq7FR/1CdJmUCUtf1rRoBG0hNLemTCLlegU3EpsEkBM5zgnLixr07QhQ593ZmnUTWbcquJ2EZao1HGC+G4F5FDQfqFaTvkfSDxX8GFZwIBNM7dS/QaAhDIGfkHCngp/DdFCuMrg75O83z0Zb1W7XQuAeuv5Swf0QPBep0RZaf429ftX8+83bt7/axH/KpD/Rv1huwgO2U154JfB6GHCICLSR+7CC6WY8tiUPzuWjD6gL78bHSJmJj/mNktKPGZV44SncLXgyssJ2acLyE5hFxFwFT/ut2Qa2/cR/2p4nQmHL5md3xQf9X3gD9rjBTJ/tlGl9hLR5l5XSdepXWMBAi2EdcfvLvJQhAZpjk2EJ8eEqF5ZmAz9Y/rNMHHSifwCSjTGG",
"user_name":
"WriteToBigQuery/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs"
}
},
{
"kind": "Flatten",
"name": "s42",
"properties": {
"display_data": [],
"inputs": [
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s24"
},
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s39"
}
],
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_6"
},
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_6"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_6"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "WriteToBigQuery/BigQueryBatchFileLoads/Flatten.out"
}
],
"user_name": "WriteToBigQuery/BigQueryBatchFileLoads/Flatten"
}
}
],
"type": "JOB_TYPE_BATCH"
}
INFO:apache_beam.runners.dataflow.dataflow_runner:Job
2020-04-17_07_50_02-212889806776169839 is in state JOB_STATE_DONE
INFO:apache_beam.runners.dataflow.internal.apiclient:Create job: <Job
createTime: '2020-04-17T15:10:05.029291Z'
currentStateTime: '1970-01-01T00:00:00Z'
id: '2020-04-17_08_10_03-2168704513013211506'
location: 'us-central1'
name: 'beamapp-jenkins-0417150947-151768'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2020-04-17T15:10:05.029291Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:apache_beam.runners.dataflow.internal.apiclient:Created job with id:
[2020-04-17_08_10_03-2168704513013211506]
INFO:apache_beam.runners.dataflow.internal.apiclient:To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_10_03-2168704513013211506?project=apache-beam-testing
INFO:apache_beam.testing.pipeline_verifiers:Wait 20 seconds...
INFO:apache_beam.io.gcp.datastore.v1new.datastore_write_it_pipeline:Querying a
limited set of 500 entities and verifying count.
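The verification step the line above describes amounts to a capped Datastore query plus a count check. A minimal sketch with the google-cloud-datastore client follows; the kind name is a placeholder, since the actual kind is not shown in this log:

    # Hedged sketch: re-run the capped query this log line describes.
    # "EntityKind" is a placeholder; the real kind is not visible in this log.
    from google.cloud import datastore

    client = datastore.Client(project="apache-beam-testing")
    query = client.query(kind="EntityKind")
    entities = list(query.fetch(limit=500))  # limited set of 500 entities
    print("fetched %d entities" % len(entities))
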
WARNING:root:Make sure that the locally built Python SDK docker image has a
Python 3.5 interpreter.
INFO:root:Using Python SDK docker image: apache/beam_python3.5_sdk:2.22.0.dev.
If the image is not available locally, we will try to pull it from hub.docker.com.
WARNING:apache_beam.runners.dataflow.dataflow_runner:Typical end users should
not use this worker jar feature. It can only be used when FnAPI is enabled.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pipeline.pb...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pipeline.pb
in 0 seconds.
INFO:apache_beam.runners.portability.stager:Executing command:
['https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/build/gradleenv/-1734967054/bin/python',
'-m', 'pip', 'download', '--dest', '/tmp/dataflow-requirements-cache', '-r',
'postcommit_requirements.txt', '--exists-action', 'i', '--no-binary', ':all:']
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.032Z:
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service
Account.
INFO:apache_beam.runners.portability.stager:Copying Beam SDK
"<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/build/apache-beam.tar.gz">
to staging location.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/requirements.txt...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/requirements.txt
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.4.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.4.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/six-1.14.0.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/six-1.14.0.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.3.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.3.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.1.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/parameterized-0.7.1.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/mock-2.0.0.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/mock-2.0.0.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pbr-5.4.4.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pbr-5.4.4.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/funcsigs-1.0.2.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/funcsigs-1.0.2.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pbr-5.4.5.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/pbr-5.4.5.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/PyHamcrest-1.10.1.tar.gz...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/PyHamcrest-1.10.1.tar.gz
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/dataflow_python_sdk.tar...
INFO:apache_beam.runners.dataflow.internal.apiclient:Completed GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/dataflow_python_sdk.tar
in 0 seconds.
INFO:apache_beam.runners.dataflow.internal.apiclient:Starting GCS upload to
gs://temp-storage-for-end-to-end-tests/staging-it/beamapp-jenkins-0417150202-193137.1587135722.193301/beamapp-jenkins-0417150202-193137.1587136207.487814/dataflow-worker.jar...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:13.123Z:
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on
the rate of progress in the currently running stage(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:Job
2020-04-17_08_10_03-2168704513013211506 is in state JOB_STATE_RUNNING
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:15.667Z:
JOB_MESSAGE_BASIC: Executing BigQuery import job
"dataflow_job_11166966061264166048". You can check its status with the bq tool:
"bq show -j --project_id=apache-beam-testing dataflow_job_11166966061264166048".
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:03.478Z:
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2020-04-17_08_10_03-2168704513013211506. The number of workers will be between
1 and 1000.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:03.479Z:
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job
2020-04-17_08_10_03-2168704513013211506.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:06.940Z:
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service
Account.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:07.817Z:
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-f.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.468Z:
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.503Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables:
GroupByKey not followed by a combiner.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.523Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations:
GroupByKey not followed by a combiner.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.555Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows: GroupByKey not
followed by a combiner.
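The three "Combiner lifting skipped" messages above share one cause: the
optimization only applies when a GroupByKey is immediately followed by a
combining transform. A minimal sketch of the distinction (illustrative only,
not code from this job):

    import apache_beam as beam

    with beam.Pipeline() as p:
        kvs = p | beam.Create([("a", 1), ("a", 2), ("b", 3)])
        # CombinePerKey expands to GroupByKey + combiner, so the runner can
        # lift part of the combining to before the shuffle.
        lifted = kvs | "Sum" >> beam.CombinePerKey(sum)
        # A bare GroupByKey followed by an ordinary ParDo cannot be lifted,
        # which is what the messages above report for these steps.
        unlifted = (kvs
                    | "Group" >> beam.GroupByKey()
                    | "SumLater" >> beam.MapTuple(lambda k, vs: (k, sum(vs))))
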
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.595Z:
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.629Z:
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into
MergeBucketsMappingFns
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.765Z:
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.833Z:
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.867Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s18 for input s11.WrittenFiles
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.899Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround, through flatten
WriteToBigQuery/BigQueryBatchFileLoads/DestinationFilesUnion, into producer
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.930Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write into
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:09.966Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow
into WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.003Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
into
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.035Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/ParDo(TriggerLoadJobs)
into
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.059Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/TriggerLoadJobsWithoutTempTables
into
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.099Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s18-u32 for input s19.None-c30
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.135Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify,
through flatten
WriteToBigQuery/BigQueryBatchFileLoads/DestinationFilesUnion/Unzipped-1, into
producer WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.166Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround into
WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.199Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify into
WriteToBigQuery/BigQueryBatchFileLoads/IdentityWorkaround
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.234Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RewindowIntoGlobal into CreateInput/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.272Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/AppendDestination into
WriteToBigQuery/BigQueryBatchFileLoads/RewindowIntoGlobal
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.305Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
into WriteToBigQuery/BigQueryBatchFileLoads/AppendDestination
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.344Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(_ShardDestinations) into
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.378Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Reify into
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(_ShardDestinations)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.410Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Write into
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.442Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow into
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.467Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/DropShardNumber into
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.502Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
into WriteToBigQuery/BigQueryBatchFileLoads/DropShardNumber
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.535Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/Map(<lambda at
bigquery_file_loads.py:870>) into
WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.567Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/GenerateFilePrefix into
WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.603Z:
JOB_MESSAGE_DETAILED: Fusing siblings
WriteToBigQuery/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs
and
WriteToBigQuery/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.635Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs)
into
WriteToBigQuery/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.659Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables
into WriteToBigQuery/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.691Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue into
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.717Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify
into WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.746Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write
into
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.778Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
into
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.810Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames into
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.845Z:
JOB_MESSAGE_DETAILED: Fusing consumer
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/Delete into
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.877Z:
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.906Z:
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.941Z:
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:10.975Z:
JOB_MESSAGE_DEBUG: Assigning stage ids.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.268Z:
JOB_MESSAGE_DEBUG: Executing wait step start46
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.341Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read+WriteToBigQuery/BigQueryBatchFileLoads/Map(<lambda
at
bigquery_file_loads.py:870>)+WriteToBigQuery/BigQueryBatchFileLoads/GenerateFilePrefix
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.363Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.387Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.396Z:
JOB_MESSAGE_BASIC: Executing operation CreateSchema/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.418Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.418Z:
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.441Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.468Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.476Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.476Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.480Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.514Z:
JOB_MESSAGE_BASIC: Finished operation CreateSchema/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.521Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.534Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Session"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.566Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/GroupShardedRows/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.604Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/ImpulseEmptyPC/Read.out" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.639Z:
JOB_MESSAGE_DEBUG: Value "CreateSchema/Read.out" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.663Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Session"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.695Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.727Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.730Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.752Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.767Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.784Z:
JOB_MESSAGE_BASIC: Executing operation
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.786Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.809Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/_UnpickledSideInput(Read.out.0).output"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.811Z:
JOB_MESSAGE_BASIC: Finished operation
WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/_UnpickledSideInput(Read.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.842Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/_UnpickledSideInput(Read.out.0).output"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.867Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/_UnpickledSideInput(Read.out.0).output"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:11.896Z:
JOB_MESSAGE_DEBUG: Value
"WriteToBigQuery/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/_UnpickledSideInput(Read.out.0).output"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-04-17T15:10:17.619Z:
JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric
descriptors and Stackdriver will not create new Dataflow custom metrics for
this job. Each unique user-defined metric name (independent of the DoFn in
which it is defined) produces a new metric descriptor. To delete old / unused
metric descriptors see
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
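The cleanup this warning points at can also be scripted; a hedged sketch with
the google-cloud-monitoring client is below. The metric.type filter is an
assumption about where Dataflow places its custom metric descriptors, so
verify it against your project before deleting anything:

    # Hedged sketch: list and delete old Dataflow-created metric descriptors.
    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    request = {
        "name": "projects/apache-beam-testing",
        # Assumed prefix for Dataflow custom metrics; verify before deleting.
        "filter": 'metric.type = starts_with("custom.googleapis.com/dataflow")',
    }
    for descriptor in client.list_metric_descriptors(request=request):
        client.delete_metric_descriptor(name=descriptor.name)
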
test_bigquery_tornadoes_it
(apache_beam.examples.cookbook.bigquery_tornadoes_it_test.BigqueryTornadoesIT)
... ok
test_streaming_wordcount_debugging_it
(apache_beam.examples.streaming_wordcount_debugging_it_test.StreamingWordcountDebuggingIT)
... SKIP: TODO(BEAM-8078): This test is failing
test_autocomplete_it
(apache_beam.examples.complete.autocomplete_test.AutocompleteTest) ... ok
test_datastore_wordcount_it
(apache_beam.examples.cookbook.datastore_wordcount_it_test.DatastoreWordCountIT)
... ok
test_leader_board_it
(apache_beam.examples.complete.game.leader_board_it_test.LeaderBoardIT) ... ok
test_game_stats_it
(apache_beam.examples.complete.game.game_stats_it_test.GameStatsIT) ... ok
test_streaming_wordcount_it
(apache_beam.examples.streaming_wordcount_it_test.StreamingWordCountIT) ... ok
test_wordcount_fnapi_it (apache_beam.examples.wordcount_it_test.WordCountIT)
... ok
test_wordcount_it (apache_beam.examples.wordcount_it_test.WordCountIT) ... ok
test_hourly_team_score_it
(apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT)
... Failure: ConnectionRefusedError ([Errno 111] Connection refused) ... ERROR
test_avro_it (apache_beam.examples.fastavro_it_test.FastavroIT) ... Failure:
ConnectionRefusedError ([Errno 111] Connection refused) ... ERROR
Terminated
The message received from the daemon indicates that the daemon has disappeared.
Build request sent: Build{id=1b4f31d0-47cc-4764-b087-9b65160920e1,
currentDir=https://builds.apache.org/job/beam_PostCommit_Python35/ws/src}
Attempting to read last messages from the daemon log...
Daemon pid: 3571
log file: /home/jenkins/.gradle/daemon/5.2.1/daemon-3571.out.log
----- Last 20 lines from daemon log file - daemon-3571.out.log -----
Terminated
Daemon vm is shutting down... The daemon has exited normally or was terminated
in response to a user interrupt.
----- End of the daemon log -----
FAILURE: Build failed with an exception.
* What went wrong:
Gradle build daemon disappeared unexpectedly (it may have been killed or may
have crashed)
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure