See
<https://builds.apache.org/job/beam_PostCommit_Python_Verify/3900/display/redirect?page=changes>
Changes:
[ankurgoenka] [BEAM-3411] Fixing KeyError on progress update request
------------------------------------------
[...truncated 1.19 MB...]
"@type": "kind:stream",
"component_encodings": [
{
"@type":
"VarIntCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxhiUWeeSXOIA5XIYNmYyFjbSFTkh4A89cR+g==",
"component_encodings": []
}
],
"is_stream_like": true
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "monthly count/GroupByKey.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s2"
},
"serialized_fn":
"%0AJ%22H%0A%1Dref_Coder_GlobalWindowCoder_1%12%27%0A%25%0A%23%0A%21urn%3Abeam%3Acoders%3Aglobal_window%3A0.1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01",
"user_name": "monthly count/GroupByKey"
}
},
{
"kind": "CombineValues",
"name": "s4",
"properties": {
"display_data": [],
"encoding": {
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "monthly count/Combine.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s3"
},
"serialized_fn": "<string of 236 bytes>",
"user_name": "monthly count/Combine"
}
},
{
"kind": "ParallelDo",
"name": "s5",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "<lambda>"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "format.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s4"
},
"serialized_fn": "<string of 1012 bytes>",
"user_name": "format"
}
},
{
"kind": "ParallelWrite",
"name": "s6",
"properties": {
"create_disposition": "CREATE_IF_NEEDED",
"dataset": "BigQueryTornadoesIT",
"display_data": [],
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"RowAsDictJsonCoder$eNprYEpOLEhMzkiNT0pNzNXLzNdLTy7QS8pMLyxNLarkCsovdyx2yUwu8SrOz3POT0kt4ipk0GwsZKwtZErSAwCu1BVY",
"component_encodings": []
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"format": "bigquery",
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s5"
},
"schema": "{\"fields\": [{\"type\": \"INTEGER\", \"name\": \"month\",
\"mode\": \"NULLABLE\"}, {\"type\": \"INTEGER\", \"name\": \"tornado_count\",
\"mode\": \"NULLABLE\"}]}",
"table": "monthly_tornadoes_1515400307393",
"user_name": "Write/WriteToBigQuery/NativeWrite",
"write_disposition": "WRITE_TRUNCATE"
}
}
],
"type": "JOB_TYPE_BATCH"
}
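Steps s4 through s6 above are the tail of what appears to be the BigQuery tornadoes example pipeline: a per-month Combine, a formatting ParDo, and a BigQuery write. As a minimal, self-contained sketch of the kind of code that produces a ParallelWrite step like s6 (assuming the standard beam.io.WriteToBigQuery API; the Create input and transform labels are illustrative stand-ins, not the example's actual source):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    # Stand-in (month, count) pairs in place of the real "monthly count" output.
    with beam.Pipeline(options=PipelineOptions()) as p:
        (p
         | 'create' >> beam.Create([(1, 30), (2, 24)])
         | 'format' >> beam.Map(
             lambda kv: {'month': kv[0], 'tornado_count': kv[1]})
         | 'Write' >> beam.io.WriteToBigQuery(
             table='monthly_tornadoes_1515400307393',  # timestamped name from the log
             dataset='BigQueryTornadoesIT',
             project='apache-beam-testing',
             schema='month:INTEGER, tornado_count:INTEGER',
             create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
             write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE))

The schema string here mirrors the "schema" property serialized in step s6, and the two dispositions match its create_disposition and write_disposition fields.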
root: INFO: Create job: <Job
createTime: u'2018-01-08T08:31:48.819322Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2018-01-08_00_31_48-173210535311791867'
location: u'us-central1'
name: u'beamapp-jenkins-0108083147-433838'
projectId: u'apache-beam-testing'
stageStates: []
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-08_00_31_48-173210535311791867]
root: INFO: To access the Dataflow monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-08_00_31_48-173210535311791867?project=apache-beam-testing
root: INFO: Job 2018-01-08_00_31_48-173210535311791867 is in state
JOB_STATE_PENDING
root: INFO: 2018-01-08T08:31:48.257Z: JOB_MESSAGE_WARNING: (2675e106c9d586f):
Setting the number of workers (1) disables autoscaling for this job. If you are
trying to cap autoscaling, consider only setting max_num_workers. If you want
to disable autoscaling altogether, the documented way is to explicitly use
autoscalingAlgorithm=NONE.
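The warning above is advisory: pinning the worker count to 1 implicitly turns autoscaling off. As a minimal sketch of the two alternatives it describes (assuming the documented Beam Python Dataflow options; this job's actual invocation is not shown in the log):

    from apache_beam.options.pipeline_options import PipelineOptions

    # Cap autoscaling without disabling it: set only max_num_workers.
    capped = PipelineOptions(['--max_num_workers=10'])

    # Disable autoscaling explicitly, as the message recommends.
    fixed = PipelineOptions(['--autoscaling_algorithm=NONE', '--num_workers=1'])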
root: INFO: 2018-01-08T08:31:50.822Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3f12):
Checking required Cloud APIs are enabled.
root: INFO: 2018-01-08T08:31:51.448Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c313b):
Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-08T08:31:51.474Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c37e2):
Expanding GroupByKey operations into optimizable parts.
root: INFO: 2018-01-08T08:31:51.508Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c36fc):
Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2018-01-08T08:31:51.545Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c3530):
Annotating graph with Autotuner information.
root: INFO: 2018-01-08T08:31:51.654Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3af1):
Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-08T08:31:51.682Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3a0b):
Fusing consumer months with tornadoes into read
root: INFO: 2018-01-08T08:31:51.716Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3925):
Fusing consumer monthly count/GroupByKey/Reify into monthly
count/GroupByKey+monthly count/Combine/Partial
root: INFO: 2018-01-08T08:31:51.748Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c383f):
Fusing consumer format into monthly count/Combine/Extract
root: INFO: 2018-01-08T08:31:51.771Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3759):
Fusing consumer monthly count/Combine/Extract into monthly count/Combine
root: INFO: 2018-01-08T08:31:51.801Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c3673):
Fusing consumer Write/WriteToBigQuery/NativeWrite into format
root: INFO: 2018-01-08T08:31:51.834Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c358d):
Fusing consumer monthly count/Combine into monthly count/GroupByKey/Read
root: INFO: 2018-01-08T08:31:51.866Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c34a7):
Fusing consumer monthly count/GroupByKey+monthly count/Combine/Partial into
months with tornadoes
root: INFO: 2018-01-08T08:31:51.891Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c33c1):
Fusing consumer monthly count/GroupByKey/Write into monthly
count/GroupByKey/Reify
root: INFO: 2018-01-08T08:31:51.913Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c32db):
Workflow config is missing a default resource spec.
root: INFO: 2018-01-08T08:31:51.938Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c31f5):
Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-01-08T08:31:51.969Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c310f):
Adding workflow start and stop steps.
root: INFO: 2018-01-08T08:31:51.996Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c3029):
Assigning stage ids.
root: INFO: 2018-01-08T08:31:52.153Z: JOB_MESSAGE_DEBUG: (27ef56c0985ae006):
Executing wait step start22
root: INFO: 2018-01-08T08:31:52.222Z: JOB_MESSAGE_BASIC: (27ef56c0985aef0f):
Executing operation monthly count/GroupByKey/Create
root: INFO: 2018-01-08T08:31:52.263Z: JOB_MESSAGE_DEBUG: (bbab4ff070daebd):
Starting worker pool setup.
root: INFO: 2018-01-08T08:31:52.292Z: JOB_MESSAGE_BASIC: (bbab4ff070da913):
Starting 1 workers in us-central1-f...
root: INFO: 2018-01-08T08:31:52.381Z: JOB_MESSAGE_DEBUG: (27ef56c0985aed21):
Value "monthly count/GroupByKey/Session" materialized.
root: INFO: 2018-01-08T08:31:52.440Z: JOB_MESSAGE_BASIC: (27ef56c0985aeec9):
Executing operation read+months with tornadoes+monthly count/GroupByKey+monthly
count/Combine/Partial+monthly count/GroupByKey/Reify+monthly
count/GroupByKey/Write
root: INFO: 2018-01-08T08:31:53.034Z: JOB_MESSAGE_BASIC: (862b35649ab90293):
BigQuery export job "dataflow_job_9667879731289722001" started. You can check
its status with the bq tool: "bq show -j --project_id=clouddataflow-readonly
dataflow_job_9667879731289722001".
root: INFO: Job 2018-01-08_00_31_48-173210535311791867 is in state
JOB_STATE_RUNNING
root: INFO: 2018-01-08T08:32:00.909Z: JOB_MESSAGE_DETAILED: (a2a3e3eabcdc185c):
Autoscaling: Raised the number of workers to 0 based on the rate of progress in
the currently running step(s).
root: INFO: 2018-01-08T08:32:23.398Z: JOB_MESSAGE_DETAILED: (30dada26d0152e3e):
BigQuery export job progress: "dataflow_job_9667879731289722001" observed total
of 1 exported files thus far.
root: INFO: 2018-01-08T08:32:23.429Z: JOB_MESSAGE_BASIC: (30dada26d01522f4):
BigQuery export job finished: "dataflow_job_9667879731289722001"
root: INFO: 2018-01-08T08:32:30.637Z: JOB_MESSAGE_ERROR: (a2a3e3eabcdc1709):
Startup of the worker pool in zone us-central1-f failed to bring up any of the
desired 1 workers. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded. Limit:
21000.0 in region us-central1.
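This QUOTA_EXCEEDED error is the root cause of the failure that follows: the worker's persistent disk would push the us-central1 region past its 21000 GB DISKS_TOTAL_GB limit, so none of the requested workers ever start. Assuming suitable permissions on the project, the regional usage and limit can be inspected with, for example, "gcloud compute regions describe us-central1 --project=apache-beam-testing", whose "quotas" list includes DISKS_TOTAL_GB.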
root: INFO: 2018-01-08T08:32:30.671Z: JOB_MESSAGE_ERROR: (a2a3e3eabcdc1ce7):
Workflow failed.
root: INFO: 2018-01-08T08:32:30.843Z: JOB_MESSAGE_DETAILED: (bb4d1995d62c341e):
Cleaning up.
root: INFO: 2018-01-08T08:32:30.889Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c3252):
Starting worker pool teardown.
root: INFO: 2018-01-08T08:32:30.920Z: JOB_MESSAGE_BASIC: (bb4d1995d62c316c):
Stopping worker pool...
root: INFO: 2018-01-08T08:33:39.695Z: JOB_MESSAGE_BASIC: (bb4d1995d62c3dd4):
Worker pool stopped.
root: INFO: 2018-01-08T08:33:39.753Z: JOB_MESSAGE_DEBUG: (bb4d1995d62c3c08):
Tearing down pending resources...
root: INFO: Job 2018-01-08_00_31_48-173210535311791867 is in state
JOB_STATE_FAILED
root: INFO: Clean up a Bigquery table with project: apache-beam-testing,
dataset: BigQueryTornadoesIT, table: monthly_tornadoes_1515400307393.
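(For a manual cleanup of the same table, something like "bq rm -f -t apache-beam-testing:BigQueryTornadoesIT.monthly_tornadoes_1515400307393" should work; the -f flag skips the confirmation prompt.)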
google.auth.transport._http_client: DEBUG: Making request: GET
http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/project/project-id
google_auth_httplib2: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
google_auth_httplib2: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
Ran 3 tests in 122.779s
FAILED (errors=4)
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]