See
<https://ci-beam.apache.org/job/beam_PostCommit_Python38/843/display/redirect?page=changes>
Changes:
[zyichi] Setup InfluxDbIO_IT jenkins job cron
[Kyle Weaver] [BEAM-10379] Remove BIT_XOR from ZetaSQL supported functions list.
[Kyle Weaver] [BEAM-11732] Revert flink-clients from runtime to compile
configuration.
[noreply] [BEAM-11731] Restrict to numpy <1.20.0 (#13870)
[noreply] [BEAM-11357] Copy Annotations when cloning PTransforms (#13865)
[noreply] [BEAM-11693] Update formatting. Fix email template (#13815)
------------------------------------------
[...truncated 37.67 MB...]
},
{
"kind": "ParallelDo",
"name": "s47",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "WaitForBQJobs",
"type": "STRING",
"value": "apache_beam.io.gcp.bigquery_file_loads.WaitForBQJobs"
}
],
"non_parallel_inputs": {
"python_side_input0-write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s46"
}
},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
},
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s45"
},
"serialized_fn":
"QlpoOTFBWSZTWY53WkoAAif/4/f/+Cvha+gQPf//wr/v/6Li1A4EAEAQCAAAwAK9Ca0M2IaJBpNMTQABomamQyaGIAyBpo0NBkxNAADRR6mqeyap+mqepsoAAD1AAMg0AAAAAAA4NGjQNBoDJiAyNDIAA00yAAAYIACSIIp5EYJpplMmT0Q00aAANADQANHppDJp6giy3E3EsuF0bzHEwhDtyMn5EV/txKG18Pp8vst904YBmFUJxIy52QEJWqZtFpUN6IMCow8y0sKr77T5jHgmYpAzrtKYDHOy4L2VYoTkCNXY6vFdM17aYQGXcjcuKCDmd55VDT3HsW6bEobbLRibJuKJFBDaF0hItydOg+pGBCVqAYw0oBm3clgohEF3SDgEcXp3Beu3byrFoYZmGy7LhmwJcgVhGGheaBZBB2PPt8GMqpJwaDkVlrdI4SuoYPM1yG1CVNE0zdGIUTeylaYXQjoiM4zqIWabTXsA/UERjEjLZSMEY7ZPSxShqQPYoOphoMKDg+ABDIjwmIpZBrRIuWQJK6YGHqc0wzrJpW1AJNOWEvelwusmEWKWu/W1eyFpUXL+taCFSpSg4Mxka6K8sjMwZsOhRsWYsygMc5SCURiLRELvlJmEvNGGnlxx3uxrjX6tmNY1Rvq7XGwTCEHSJlh5Y5I67Tki6E6E8ZJGBmZRyubrDUhIgDGsPS5Q0MllmRkLiWRGnqwoHqfYIlysMU/PsossiitWxF7vGJgwOYJKCsMte9nGHSMFAbFUc9pZJnOjzKXCZp0vBkBQ1ZzmJ4o6fXBHQc4Fth2ixamRMZK9Hg3I1DmoJYcyY4EEBBpRGDSOm4sBr6dkxhMNSAbRyFCj6kRk9S5Y8wfKlR0+qBNbFGfIuUyEMTkBqfFUORkjpFAyD/MnraWv5kFTFWIRoYkX2S/1XAJQWqweBsrBuwdX2Pvw+6EmOV+N6BRKUGojBFDO0mWgwnZIONGYSu6Opxw28xmpzQcx2agfcjUWKUiuWaiEKJsO+pn0xi3ZYrolmjuM1LzsW7MmtNGVhMNQlm0QjWN1lc7/5Wk/r/9/i7kinChIRzutJQA=",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs"
}
},
{
"kind": "Flatten",
"name": "s48",
"properties": {
"display_data": [],
"inputs": [
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s28"
},
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s44"
}
],
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
},
{
"@type":
"FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_3"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "write/BigQueryBatchFileLoads/Flatten.out"
}
],
"user_name": "write/BigQueryBatchFileLoads/Flatten"
}
}
],
"type": "JOB_TYPE_BATCH"
}
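The JSON above is the job graph the SDK submits to Dataflow; the WaitForBQJobs ParDo and the Flatten come from WriteToBigQuery running in FILE_LOADS mode. For orientation, a minimal sketch of a pipeline that produces a graph of this shape (the query, table, and schema below are placeholders, not the values this test used):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    with beam.Pipeline(options=PipelineOptions()) as p:
        (p
         # export-based read; produces the read/_PassThroughThenCleanup steps
         | 'read' >> beam.io.ReadFromBigQuery(
             query='SELECT ...', use_standard_sql=True)
         # FILE_LOADS expands into the write/BigQueryBatchFileLoads subgraph
         | 'write' >> beam.io.WriteToBigQuery(
             'project:dataset.table',           # placeholder destination
             schema='name:STRING,value:INTEGER',  # placeholder schema
             method=beam.io.WriteToBigQuery.Method.FILE_LOADS))

The step labels 'read' and 'write' match the read/... and write/BigQueryBatchFileLoads/... prefixes seen throughout the log below.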
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: '2021-02-02T00:55:45.366821Z'
currentStateTime: '1970-01-01T00:00:00Z'
id: '2021-02-01_16_55_43-15414403974414577320'
location: 'us-central1'
name: 'beamapp-jenkins-0202005534-789475'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2021-02-02T00:55:45.366821Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id:
[2021-02-01_16_55_43-15414403974414577320]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job:
2021-02-01_16_55_43-15414403974414577320
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2021-02-01_16_55_43-15414403974414577320?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job
2021-02-01_16_55_43-15414403974414577320 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:46.376Z:
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2021-02-01_16_55_43-15414403974414577320. The number of workers will be between
1 and 1000.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:46.495Z:
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job
2021-02-01_16_55_43-15414403974414577320.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.034Z:
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.593Z:
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.623Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables: GroupByKey not
followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.644Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations: GroupByKey not
followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.665Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
write/BigQueryBatchFileLoads/GroupShardedRows: GroupByKey not followed by a
combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.694Z:
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.713Z:
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into
MergeBucketsMappingFns
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.814Z:
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.867Z:
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.904Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s23 for input s17.WrittenFiles
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:49.932Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
write/BigQueryBatchFileLoads/IdentityWorkaround, through flatten
write/BigQueryBatchFileLoads/DestinationFilesUnion, into producer
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.028Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Write into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.052Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.073Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles) into
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.099Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/TriggerLoadJobsWithTempTables/ParDo(TriggerLoadJobs)/ParDo(TriggerLoadJobs)
into write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.117Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/TriggerLoadJobsWithoutTempTables/TriggerLoadJobsWithoutTempTables
into write/BigQueryBatchFileLoads/ParDo(PartitionFiles)/ParDo(PartitionFiles)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.141Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s23-u32 for input s24.None-c30
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.166Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify, through
flatten write/BigQueryBatchFileLoads/DestinationFilesUnion/Unzipped-1, into
producer write/BigQueryBatchFileLoads/IdentityWorkaround
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.189Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/IdentityWorkaround into
write/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.204Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Reify into
write/BigQueryBatchFileLoads/IdentityWorkaround
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.227Z:
JOB_MESSAGE_DETAILED: Fusing consumer
read/_PassThroughThenCleanup/ParDo(PassThrough)/ParDo(PassThrough) into
read/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.250Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RewindowIntoGlobal into
read/_PassThroughThenCleanup/ParDo(PassThrough)/ParDo(PassThrough)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.274Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/AppendDestination into
write/BigQueryBatchFileLoads/RewindowIntoGlobal
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.296Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
into write/BigQueryBatchFileLoads/AppendDestination
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.321Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(_ShardDestinations) into
write/BigQueryBatchFileLoads/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)/ParDo(WriteRecordsToFile)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.341Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/Reify into
write/BigQueryBatchFileLoads/ParDo(_ShardDestinations)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.364Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/Write into
write/BigQueryBatchFileLoads/GroupShardedRows/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.387Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow into
write/BigQueryBatchFileLoads/GroupShardedRows/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.412Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/DropShardNumber into
write/BigQueryBatchFileLoads/GroupShardedRows/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.430Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WriteGroupedRecordsToFile/WriteGroupedRecordsToFile
into write/BigQueryBatchFileLoads/DropShardNumber
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.481Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/LoadJobNamePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.501Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/CopyJobNamePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.524Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/GenerateFilePrefix into
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.540Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
into write/BigQueryBatchFileLoads/ImpulseMonitorLoadJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.566Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/ParDo(TriggerCopyJobs)/ParDo(TriggerCopyJobs) into
write/BigQueryBatchFileLoads/WaitForTempTableLoadJobs/WaitForTempTableLoadJobs
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.587Z:
JOB_MESSAGE_DETAILED: Fusing consumer read/MapFilesToRemove into
read/FilesToRemoveImpulse/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.612Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs into
write/BigQueryBatchFileLoads/ImpulseMonitorCopyJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.646Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables into
write/BigQueryBatchFileLoads/WaitForCopyJobs/WaitForCopyJobs
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.672Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue into
write/BigQueryBatchFileLoads/RemoveTempTables/PassTables/PassTables
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.692Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify into
write/BigQueryBatchFileLoads/RemoveTempTables/AddUselessValue
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.708Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Write into
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Reify
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.731Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
into write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.756Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames into
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/GroupByWindow
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.780Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/RemoveTempTables/Delete into
write/BigQueryBatchFileLoads/RemoveTempTables/GetTableNames
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.806Z:
JOB_MESSAGE_DETAILED: Fusing consumer
write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs
into write/BigQueryBatchFileLoads/ImpulseMonitorDestLoadJobs/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.854Z:
JOB_MESSAGE_DETAILED: Fusing consumer
read/_PassThroughThenCleanup/ParDo(RemoveExtractedFiles)/ParDo(RemoveExtractedFiles)
into read/_PassThroughThenCleanup/Create/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.908Z:
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.926Z:
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.945Z:
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:50.968Z:
JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.265Z:
JOB_MESSAGE_DEBUG: Executing wait step start49
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.304Z:
JOB_MESSAGE_BASIC: Executing operation
read/FilesToRemoveImpulse/Read+read/MapFilesToRemove
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.326Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/ImpulseSingleElementPC/Read+write/BigQueryBatchFileLoads/LoadJobNamePrefix+write/BigQueryBatchFileLoads/CopyJobNamePrefix+write/BigQueryBatchFileLoads/GenerateFilePrefix
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.334Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.349Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/GroupShardedRows/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.349Z:
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.389Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.394Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/GroupShardedRows/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.413Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.426Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.435Z:
JOB_MESSAGE_BASIC: Executing operation
write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.455Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Create
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.480Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/GroupShardedRows/Session" materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.484Z:
JOB_MESSAGE_BASIC: Finished operation
write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.515Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/GroupFilesByTableDestinations/Session"
materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.539Z:
JOB_MESSAGE_DEBUG: Value
"write/BigQueryBatchFileLoads/RemoveTempTables/DeduplicateTables/Session"
materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:55:51.556Z:
JOB_MESSAGE_DEBUG: Value "write/BigQueryBatchFileLoads/ImpulseEmptyPC/Read.out"
materialized.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:56:19.636Z:
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on
the rate of progress in the currently running stage(s).
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:56:21.637Z:
JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric
descriptors, so new user metrics of the form custom.googleapis.com/* will not
be created. However, all user metrics are also available in the metric
dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics,
you can delete old / unused metric descriptors. See
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
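The metric-descriptor warning is benign for this run, but the cleanup it suggests can be scripted against the Monitoring API it links to. A sketch using the google-cloud-monitoring client (project name assumed from the log; deletion is irreversible, so filter deliberately):

    from google.cloud import monitoring_v3

    client = monitoring_v3.MetricServiceClient()
    project = 'projects/apache-beam-testing'
    for descriptor in client.list_metric_descriptors(name=project):
        # only custom descriptors count against the quota mentioned above
        if descriptor.type.startswith('custom.googleapis.com/'):
            client.delete_metric_descriptor(name=descriptor.name)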
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:56:46.455Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-02-02T00:56:46.488Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
oauth2client.transport: INFO: Refreshing due to a 401 (attempt 1/2)
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting
for job 2021-02-01_16_55_43-15414403974414577320 after 903 seconds
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 68 tests in 5250.101s
FAILED (SKIP=6, failures=3)
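At least the failure captured above is a timeout rather than an assertion error: the runner warned "Timing out on waiting for job ... after 903 seconds" while the job was still in JOB_STATE_RUNNING. In the Python SDK that bound comes from wait_until_finish on the pipeline result; a minimal sketch of how a test imposes it (pipeline contents elided; duration is in milliseconds):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    pipeline = beam.Pipeline(options=PipelineOptions())
    # ... build the pipeline under test ...
    result = pipeline.run()
    # Returns when the job finishes or the duration elapses; on Dataflow the
    # job keeps running server-side after the local wait gives up, which is
    # what produces the "Timing out on waiting" warning above.
    result.wait_until_finish(duration=903 * 1000)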
> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED
FAILURE: Build failed with an exception.
* Where:
Script
'<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle>'
line: 118
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/6.8/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 30m 50s
214 actionable tasks: 153 executed, 57 from cache, 4 up-to-date
Gradle was unable to watch the file system for changes. The inotify watches
limit is too low.
Publishing build scan...
https://gradle.com/s/p5zmjremjfwuu
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]