See
<https://builds.apache.org/job/beam_PerformanceTests_Python/768/display/redirect>
------------------------------------------
[...truncated 35.64 KB...]
},
{
"@type": "kind:stream",
"component_encodings": [
{
"@type":
"VarIntCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxhiUWeeSXOIA5XIYNmYyFjbSFTkh4A89cR+g==",
"component_encodings": []
}
],
"is_stream_like": true
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "group.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s3"
},
"serialized_fn":
"%0AJ%22H%0A%1Dref_Coder_GlobalWindowCoder_1%12%27%0A%25%0A%23%0A%21urn%3Abeam%3Acoders%3Aglobal_window%3A0.1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01",
"user_name": "group"
}
},
{
"kind": "ParallelDo",
"name": "s5",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "count_ones"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:pair",
"component_encodings": [
{
"@type":
"StrUtf8Coder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlzBJUWhJWkWziAeVyGDZmMhY20hU5IeAAajEkY=",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "count.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s4"
},
"serialized_fn": "<string of 1012 bytes>",
"user_name": "count"
}
},
{
"kind": "ParallelDo",
"name": "s6",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "format_result"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "format.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s5"
},
"serialized_fn": "<string of 1016 bytes>",
"user_name": "format"
}
}
],
"type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
createTime: u'2018-01-09T06:08:00.367132Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2018-01-08_22_07_59-15760962798203099408'
location: u'us-central1'
name: u'beamapp-jenkins-0109060757-786772'
projectId: u'apache-beam-testing'
stageStates: []
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2018-01-08_22_07_59-15760962798203099408]
root: INFO: To access the Dataflow monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2018-01-08_22_07_59-15760962798203099408?project=apache-beam-testing
root: INFO: Job 2018-01-08_22_07_59-15760962798203099408 is in state
JOB_STATE_PENDING
root: INFO: 2018-01-09T06:07:59.624Z: JOB_MESSAGE_DETAILED: (daba3028e90ad40c):
Autoscaling is enabled for job 2018-01-08_22_07_59-15760962798203099408. The
number of workers will be between 1 and 15.
root: INFO: 2018-01-09T06:07:59.648Z: JOB_MESSAGE_DETAILED: (daba3028e90ad2cb):
Autoscaling was automatically enabled for job
2018-01-08_22_07_59-15760962798203099408.
root: INFO: 2018-01-09T06:08:01.930Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4b23):
Checking required Cloud APIs are enabled.
root: INFO: 2018-01-09T06:08:03.168Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a42d6):
Expanding CollectionToSingleton operations into optimizable parts.
root: INFO: 2018-01-09T06:08:03.203Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4713):
Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2018-01-09T06:08:03.236Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4b50):
Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2018-01-09T06:08:03.259Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a48ce):
Expanding GroupByKey operations into optimizable parts.
root: INFO: 2018-01-09T06:08:03.291Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a464c):
Annotating graph with Autotuner information.
root: INFO: 2018-01-09T06:08:03.322Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4148):
Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2018-01-09T06:08:03.351Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4ec6):
Fusing consumer split into read/Read
root: INFO: 2018-01-09T06:08:03.370Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4c44):
Fusing consumer group/Reify into pair_with_one
root: INFO: 2018-01-09T06:08:03.397Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a49c2):
Fusing consumer format into count
root: INFO: 2018-01-09T06:08:03.424Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4740):
Fusing consumer count into group/GroupByWindow
root: INFO: 2018-01-09T06:08:03.456Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a44be):
Fusing consumer pair_with_one into split
root: INFO: 2018-01-09T06:08:03.487Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a423c):
Fusing consumer group/Write into group/Reify
root: INFO: 2018-01-09T06:08:03.499Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4fba):
Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2018-01-09T06:08:03.531Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4d38):
Workflow config is missing a default resource spec.
root: INFO: 2018-01-09T06:08:03.551Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4ab6):
Adding StepResource setup and teardown to workflow graph.
root: INFO: 2018-01-09T06:08:03.573Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4834):
Adding workflow start and stop steps.
root: INFO: 2018-01-09T06:08:03.596Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a45b2):
Assigning stage ids.
root: INFO: 2018-01-09T06:08:03.728Z: JOB_MESSAGE_DEBUG: (6148c3c41eb5ccfe):
Executing wait step start13
root: INFO: 2018-01-09T06:08:03.792Z: JOB_MESSAGE_BASIC: (372a283eb58767f4):
Executing operation group/Create
root: INFO: 2018-01-09T06:08:03.836Z: JOB_MESSAGE_DEBUG: (29c911f08379e73f):
Starting worker pool setup.
root: INFO: 2018-01-09T06:08:03.866Z: JOB_MESSAGE_BASIC: (29c911f08379ec11):
Starting 1 workers in us-central1-f...
root: INFO: 2018-01-09T06:08:04.047Z: JOB_MESSAGE_DEBUG: (d415aa131b8daf15):
Value "group/Session" materialized.
root: INFO: 2018-01-09T06:08:04.105Z: JOB_MESSAGE_BASIC: (372a283eb58765e2):
Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: Job 2018-01-08_22_07_59-15760962798203099408 is in state
JOB_STATE_RUNNING
root: INFO: 2018-01-09T06:08:12.768Z: JOB_MESSAGE_DETAILED: (173322534a71329f):
Autoscaling: Raised the number of workers to 0 based on the rate of progress in
the currently running step(s).
root: INFO: 2018-01-09T06:08:52.393Z: JOB_MESSAGE_ERROR: (173322534a713c3e):
Startup of the worker pool in zone us-central1-f failed to bring up any of the
desired 1 workers. QUOTA_EXCEEDED: Quota 'DISKS_TOTAL_GB' exceeded. Limit:
21000.0 in region us-central1.
root: INFO: 2018-01-09T06:08:52.427Z: JOB_MESSAGE_ERROR: (173322534a713f60):
Workflow failed.
root: INFO: 2018-01-09T06:08:52.571Z: JOB_MESSAGE_DETAILED: (b039dfa3b84a4269):
Cleaning up.
root: INFO: 2018-01-09T06:08:52.617Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4d65):
Starting worker pool teardown.
root: INFO: 2018-01-09T06:08:52.645Z: JOB_MESSAGE_BASIC: (b039dfa3b84a4ae3):
Stopping worker pool...
root: INFO: 2018-01-09T06:10:07.453Z: JOB_MESSAGE_DEBUG: (b039dfa3b84a4e59):
Tearing down pending resources...
root: INFO: Job 2018-01-08_22_07_59-15760962798203099408 is in state
JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
Ran 2 tests in 556.610s
FAILED (errors=1)
2018-01-09 06:17:14,329 4aca2ce5 MainThread beam_integration_benchmark(1/1)
ERROR Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
File
"https://builds.apache.org/job/beam_PerformanceTests_Python/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",
line 601, in RunBenchmark
DoRunPhase(spec, collector, detailed_timer)
File
"https://builds.apache.org/job/beam_PerformanceTests_Python/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py",
line 504, in DoRunPhase
samples = spec.BenchmarkRun(spec)
File
"https://builds.apache.org/job/beam_PerformanceTests_Python/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py",
line 159, in Run
job_type=job_type)
File
"https://builds.apache.org/job/beam_PerformanceTests_Python/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py",
line 90, in SubmitJob
assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-01-09 06:17:14,330 4aca2ce5 MainThread beam_integration_benchmark(1/1)
INFO Cleaning up benchmark beam_integration_benchmark
2018-01-09 06:17:14,332 4aca2ce5 MainThread beam_integration_benchmark(1/1)
ERROR Benchmark 1/1 beam_integration_benchmark (UID:
beam_integration_benchmark0) failed. Execution will continue.
2018-01-09 06:17:14,369 4aca2ce5 MainThread INFO Benchmark run statuses:
---------------------------------------------------------------
Name UID Status
---------------------------------------------------------------
beam_integration_benchmark beam_integration_benchmark0 FAILED
---------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-01-09 06:17:14,370 4aca2ce5 MainThread INFO Complete logs can be found
at: /tmp/perfkitbenchmarker/runs/4aca2ce5/pkb.log
2018-01-09 06:17:14,370 4aca2ce5 MainThread INFO Completion statuses can be
found at: /tmp/perfkitbenchmarker/runs/4aca2ce5/completion_statuses.json
Build step 'Execute shell' marked build as failure
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]