See
<https://ci-beam.apache.org/job/beam_PostCommit_Python37/3262/display/redirect>
Changes:
------------------------------------------
[...truncated 53.18 MB...]
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "WaitForBQJobs",
"type": "STRING",
"value": "apache_beam.io.gcp.bigquery_file_loads.WaitForBQJobs"
}
],
"non_parallel_inputs": {
"python_side_input0-write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s46"
}
},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
},
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s45"
},
"serialized_fn":
"QlpoOTFBWSZTWbr5ox8AAxV/4///////////////wr/v/6Li0AAEAEAQCAAA0AM+83pygGRg1NEyp6Q0Bo0MgxGRoPSAABoNGmgeo0ANMTTEGmj1AxAgwAAAAAAAAAAAAAAAAAAAABEzUJqnlGgGgaDIAAANAAAAAAAGgAAABE/VKan6o00AA0ADTQA0aA0AA0xGgAAaAwRoMhkAkkJkBNMUwg0CaaQ2o9R6mNJ6TQ00AaGQAAAGmhp6E9TRo1U1rie267Z1rKIzunL/YhVkwvjiwy3IS7LAjrMgYcd05VACgA+1r8pMnz9gPFstS2DGwLZK7/RppSN28egaY4UjgQQQEAEABEQFJIWLREvuJ1hPrtk0rbN4+0oezF3meXoeyK4CK9F0i5MKJgQ9TgHMRgRJIHE1L5F8gNGoiRrApV3FpvScYTXzhVK4H5IwRvrIBRvUKIVOkjw4OmQwYK59WBPnLCbX2ARWlCLLzuBFiuZd6sKCUu3gOClR2YnbM00JcbW45iJ1/Q5dJK8I66o9mysuytemADsmmGCTC7AwMaAKpTarIKeQfMbzqXTRkOdtP4P0gQxIC7hD+Cl4ccuidti0QRsFYjjEpMO4iHW9C/nU3z1Odfadj8sp3ZZQ4EBhDRh2I/i2B1HKZPXnTeeDLlN+6R40cSgoS5x3784MB7h06IBwDAnx2ETTyimaUAlF2rrsCHk5uApqCjW3Go3MTns/dHddAQhAhNvCCUIb5Hl+NM+12OmJbhZHSYTRZhyL2EQN6oqmjSKuqu8YMWPdRRFjLjTwk0cNf3LRBXJreO67rmNSzaXkWhbVv7+mDw7gxFs5wDJmYndjLDWwz6eg+F1Svnw2C3dzy1FokEkatGzDawycNA1OR4VFJzFkqmKyKSIlbn7cwqGjMK9EdacHzRBZ2ZlD3+q8ZlGQUIkQZkzedYMwc0zdFGCS0NJOq2yBfYjUmftgVsqlJlagMOJarLM4zTJ6K22Cvi5P6j4kGUu+5ve5HOcxzg1YgRgdemszf+X4a3OAAAAAAAAogBDAEnCWtcACIjzQGPOgKM4iKMRhRVk0sp5ma2vNlW5FE0orkAHsVBW0/izvRrwbYGUZU4atPfYyo2ebsWFJiljqga7AWZQYrmuZ0CLxMTMNCm3akrB5POp2mnhRXOKeMVK7GRY7fc/ympHO+DNUaNTS4eYK1Z3V6ji4zZckkabINgsoggox3Qb3ZAVkayaah/En5+vulTg/1r7AxqlKnUwnMT8kuP1v/f4u5IpwoSF180Y+",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs"
}
},
{
"kind": "Flatten",
"name": "s48",
"properties": {
"display_data": [],
"inputs": [
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s44"
},
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s28"
}
],
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
},
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "write/BigQueryBatchFileLoads/Flatten.out"
}
],
"user_name": "write/BigQueryBatchFileLoads/Flatten"
}
}
],
"type": "JOB_TYPE_BATCH"
}
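[Editor's note: the step graph above is what beam.io.WriteToBigQuery expands into when it uses file loads: the WaitForBQJobs/WaitForDestinationLoadJobs ParDo, a Flatten over two load-job outputs, and the FastPrimitivesCoder windowed-value encodings. For orientation, a minimal sketch of a pipeline that expands into this kind of BigQueryBatchFileLoads sub-graph; project, table, schema, and bucket are placeholders, not values from this job.]

    # Minimal sketch (not the failing test itself) of a batch pipeline whose
    # write step expands into the BigQueryBatchFileLoads sub-graph shown in
    # the job JSON above. Project, table, schema, and bucket are placeholders.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions(
        runner='DataflowRunner',
        project='my-project',                   # placeholder
        region='us-central1',
        temp_location='gs://my-bucket/temp')    # placeholder

    with beam.Pipeline(options=options) as p:
        (p
         | 'read' >> beam.io.ReadFromBigQuery(query='SELECT 1 AS x')
         | 'write' >> beam.io.WriteToBigQuery(
             'my-project:my_dataset.my_table',  # placeholder
             schema='x:INTEGER',
             method=beam.io.WriteToBigQuery.Method.FILE_LOADS,
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))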
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: '2020-12-29T06:38:20.291266Z'
currentStateTime: '1970-01-01T00:00:00Z'
id: '2020-12-28_22_38_18-14955863395432011235'
location: 'us-central1'
name: 'beamapp-jenkins-1229063810-514820'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2020-12-29T06:38:20.291266Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id:
[2020-12-28_22_38_18-14955863395432011235]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job:
2020-12-28_22_38_18-14955863395432011235
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-12-28_22_38_18-14955863395432011235?project=apache-beam-testing
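[Editor's note: the apiclient INFO lines above are the client-side submit path: create the Job proto, get back an id, print the monitoring URL. A hedged sketch of the same submit-and-poll pattern; the options are elided and the printed values are illustrative.]

    # Sketch of the submit/poll pattern behind the "Create job" /
    # "Submitted job" / "is in state JOB_STATE_RUNNING" messages above.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    # Real runs also need project/region/temp_location; elided here.
    p = beam.Pipeline(options=PipelineOptions(runner='DataflowRunner'))
    _ = p | beam.Create([1, 2, 3]) | beam.Map(print)

    result = p.run()           # logs "Create job: <Job ...>" via the apiclient
    print(result.job_id())     # e.g. 2020-12-28_22_38_18-14955863395432011235
    print(result.state)        # e.g. JOB_STATE_RUNNING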
apache_beam.runners.dataflow.dataflow_runner: INFO: Job
2020-12-28_22_38_18-14955863395432011235 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:18.340Z:
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job
2020-12-28_22_38_18-14955863395432011235.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:18.340Z:
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2020-12-28_22_38_18-14955863395432011235. The number of workers will be between
1 and 1000.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.136Z:
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.675Z:
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.724Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables: GroupByKey not
followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.758Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations: GroupByKey not
followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.791Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/GroupShardedRows: GroupByKey not followed by a
combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.824Z:
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.856Z:
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into
MergeBucketsMappingFns
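[Editor's note: the "Combiner lifting skipped" DEBUG lines are expected for these steps. Lifting only fires when a GroupByKey is immediately followed by a combiner the optimizer can see, and the grouping steps here feed plain ParDos instead. A generic illustration of the difference, not code from this suite.]

    import apache_beam as beam

    with beam.Pipeline() as p:
        keyed = p | beam.Create([('a', 1), ('a', 2), ('b', 3)])

        # Liftable: the combine fn is visible, so workers can pre-combine
        # values before the shuffle.
        _ = keyed | 'combined' >> beam.CombinePerKey(sum)

        # Not liftable: the runner only sees an opaque GroupByKey, as with
        # GroupFilesByTableDestinations and GroupShardedRows above.
        _ = (keyed
             | 'grouped' >> beam.GroupByKey()
             | 'summed' >> beam.MapTuple(lambda k, vs: (k, sum(vs))))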
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:23.973Z:
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.024Z:
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.066Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s23 for input s17.WrittenFiles
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.097Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
write/BigQueryBatchFileLoads/IdentityWorkaround, through flatten
write/BigQueryBatchFileLoads/DestinationFilesUnion, into producer
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.121Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.150Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.176Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles) into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.212Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/ParDo(TriggerLoadJobs)
into write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.241Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/TriggerLoadJobsWithoutTempTables
into write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.264Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s23-u32 for input s24.None-c30
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.290Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify, through
flatten write/BigQueryBatchFileLoads/DestinationFilesUnion/Unzipped-1, into
producer write/BigQueryBatchFileLoads/IdentityWorkaround
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.314Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/IdentityWorkaround into
write/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.335Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify into
write/BigQueryBatchFileLoads/IdentityWorkaround
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.359Z:
JOB_MESSAGE_DETAILED: Fusing consumer
read/_PassThroughThenCleanup/ParDo(PassThrough)/ParDo(PassThrough) into
read/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.394Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RewindowIntoGlobal into
read/_PassThroughThenCleanup/ParDo(PassThrough)/ParDo(PassThrough)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.439Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/AppendDestination into
write/BigQueryBatchFileLoads/RewindowIntoGlobal
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.462Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
into write/BigQueryBatchFileLoads/AppendDestination
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.485Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(_ShardDestinations) into
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.509Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/Reify into
write/BigQueryBatchFileLoads/ParDo(_ShardDestinations)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.533Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/Write into
write/BigQueryBatchFileLoads/GroupShardedRows/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.558Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow into
write/BigQueryBatchFileLoads/GroupShardedRows/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.585Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/DropShardNumber into
write/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.618Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
into write/BigQueryBatchFileLoads/DropShardNumber
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.642Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/LoadJobNamePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.669Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/CopyJobNamePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.713Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GenerateFilePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.736Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
into write/BigQueryBatchFileLoads/ImpulseMonitorLoadJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.760Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs) into
write/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.785Z:
JOB_MESSAGE_DETAILED: Fusing consumer read/MapFilesToRemove into
read/FilesToRemoveImpulse/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.808Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs into
write/BigQueryBatchFileLoads/ImpulseMonitorCopyJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.830Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables into
write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.857Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue into
write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.878Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify into
write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.901Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write into
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.923Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
into write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.954Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames into
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:24.980Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/Delete into
write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.009Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs
into write/BigQueryBatchFileLoads/ImpulseMonitorDestLoadJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.030Z:
JOB_MESSAGE_DETAILED: Fusing consumer
read/_PassThroughThenCleanup/ParDo(RemoveExtractedFiles)/ParDo(RemoveExtractedFiles)
into read/_PassThroughThenCleanup/Create/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.059Z:
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.080Z:
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.099Z:
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.124Z:
JOB_MESSAGE_DEBUG: Assigning stage ids.
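[Editor's note: the long run of "Fusing consumer X into Y" DETAILED lines is Dataflow's fusion optimizer collapsing adjacent steps into single execution stages, and the "Unzipping flatten" lines are the flatten handling that enables more fusion. When a pipeline author wants to prevent fusion, e.g. after a large fan-out, a shuffle boundary such as beam.Reshuffle() breaks it. A generic sketch of that technique, an assumption about usage rather than something this job does.]

    import apache_beam as beam

    with beam.Pipeline() as p:
        _ = (p
             | beam.Create(['a', 'b'])                       # placeholder input
             | 'fanout' >> beam.FlatMap(lambda x: [x] * 1000)
             # Reshuffle inserts a shuffle, so 'fanout' and 'process' end up
             # in separate stages instead of being fused like the steps above.
             | beam.Reshuffle()
             | 'process' >> beam.Map(lambda x: x.upper()))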
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.661Z:
JOB_MESSAGE_DEBUG: Executing wait step start49
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.715Z:
JOB_MESSAGE_BASIC: Executing operation
read/FilesToRemoveImpulse/Read+read/MapFilesToRemove
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.732Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read+write/BigQueryBatchFileLoads/LoadJobNamePrefix+write/BigQueryBatchFileLoads/CopyJobNamePrefix+write/BigQueryBatchFileLoads/GenerateFilePrefix
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.744Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.755Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/GroupShardedRows/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.767Z:
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.776Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.802Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.811Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.811Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/GroupShardedRows/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.829Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.840Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.848Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.860Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Session"
materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.882Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/GroupShardedRows/Session" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.910Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Session"
materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:25.930Z:
JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read.out"
materialized.
oauth2client.transport: INFO: Refreshing due to a 401 (attempt 1/2)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:48.853Z:
JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric
descriptors, so new user metrics of the form custom.googleapis.com/* will not
be created. However, all user metrics are also available in the metric
dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics,
you can delete old / unused metric descriptors. See
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
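[Editor's note: this BASIC message is a quota notice, not an error: the project has hit the 100-descriptor limit for Dataflow-created custom metrics, and the two API links are for pruning old ones. A hedged cleanup sketch using the google-cloud-monitoring client; it assumes google-cloud-monitoring >= 2.0, the filter is an assumption, and the delete call is commented out because it is destructive.]

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    project = 'projects/apache-beam-testing'   # project name from this log
    descriptors = client.list_metric_descriptors(request={
        'name': project,
        'filter': 'metric.type = starts_with("custom.googleapis.com/")',
    })
    for d in descriptors:
        print(d.type)
        # Review the printed list before enabling; deletion is irreversible:
        # client.delete_metric_descriptor(request={'name': d.name})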
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:38:54.906Z:
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on
the rate of progress in the currently running stage(s).
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:39:27.485Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:39:27.519Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:44:09.713Z:
JOB_MESSAGE_BASIC: Finished operation
read/FilesToRemoveImpulse/Read+read/MapFilesToRemove
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:44:09.765Z:
JOB_MESSAGE_DEBUG: Value "read/MapFilesToRemove.out" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:44:09.815Z:
JOB_MESSAGE_BASIC: Executing operation
read/_PassThroughThenCleanup/ParDo(RemoveExtractedFiles)/_UnpickledSideInput(MapFilesToRemove.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:44:09.864Z:
JOB_MESSAGE_BASIC: Finished operation
read/_PassThroughThenCleanup/ParDo(RemoveExtractedFiles)/_UnpickledSideInput(MapFilesToRemove.out.0)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-12-29T06:44:09.958Z:
JOB_MESSAGE_DEBUG: Value
"read/_PassThroughThenCleanup/ParDo(RemoveExtractedFiles)/_UnpickledSideInput(MapFilesToRemove.out.0).output"
materialized.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting
for job 2020-12-28_22_38_18-14955863395432011235 after 901 seconds
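[Editor's note: this WARNING is the actual failure mode of the test. DataflowPipelineResult.wait_until_finish was given a deadline and the job was still running when it expired, so the IT was marked failed rather than the job itself erroring. A small sketch of the call that produces this message; the 900-second budget is inferred from the "after 901 seconds" wording, not read from the suite's config.]

    # result is a DataflowPipelineResult from p.run(), as in the earlier
    # sketches. duration is in milliseconds.
    state = result.wait_until_finish(duration=900 * 1000)
    if state != 'DONE':
        # Corresponds to the "Timing out on waiting for job ..." warning above.
        print('Job did not finish within the deadline; last state:', state)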
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py37.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 68 tests in 4362.922s
FAILED (SKIP=6, failures=1)
> Task :sdks:python:test-suites:dataflow:py37:postCommitIT FAILED
FAILURE: Build failed with an exception.
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 118
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/6.7.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 17m 8s
214 actionable tasks: 153 executed, 57 from cache, 4 up-to-date
Gradle was unable to watch the file system for changes. The inotify watches
limit is too low.
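[Editor's note: the inotify warning is environmental rather than test-related: Gradle's file-system watching on Linux needs a higher fs.inotify.max_user_watches than this agent provides. A quick Linux-only check of the current limit; the commonly suggested 524288 value is a convention, not something taken from this build.]

    # Print the inotify watch limit that Gradle's file watching depends on.
    # Raising it is a sysctl change, typically
    # fs.inotify.max_user_watches=524288 in /etc/sysctl.conf (conventional
    # value, not from this log).
    with open('/proc/sys/fs/inotify/max_user_watches') as f:
        print('fs.inotify.max_user_watches =', f.read().strip())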
Publishing build scan...
https://gradle.com/s/v6nni3dn7xn7k
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]