See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/135/changes>
Changes:
[robertwb] Minor cdef value changes.
------------------------------------------
[...truncated 3140 lines...]
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type":
"TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==",
"component_encodings": []
},
{
"@type": "SingletonCoder$<string of 344 bytes>",
"component_encodings": []
}
],
"is_wrapper": true
}
]
},
"output_name": "out",
"user_name":
"write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s10"
},
"user_name":
"write/WriteImpl/ViewAsIterable(write|WriteImpl|write_bundles.None)/CreatePCollectionView"
}
},
{
"kind": "ParallelDo",
"name": "s12",
"properties": {
"non_parallel_inputs": {
"s11": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s11"
},
"s9": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s9"
}
},
"output_info": [
{
"encoding": {
"@type": "WindowedValueCoder$<string of 736 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 172 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 172 bytes>",
"component_encodings": []
},
{
"@type": "FastPrimitivesCoder$<string of 172 bytes>",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type":
"TimestampCoder$gAJjYXBhY2hlX2JlYW0uY29kZXJzLmNvZGVycwpUaW1lc3RhbXBDb2RlcgpxACmBcQF9cQJiLg==",
"component_encodings": []
},
{
"@type": "SingletonCoder$<string of 344 bytes>",
"component_encodings": []
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "write/WriteImpl/finalize_write.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s7"
},
"serialized_fn": "<string of 1496 bytes>",
"user_name": "write/WriteImpl/finalize_write"
}
}
],
"type": "JOB_TYPE_BATCH"
}
INFO:root:Create job: <Job
id: u'2016-07-21_17_45_32-15704347142994509983'
projectId: u'apache-beam-testing'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-07-21_17_45_32-15704347142994509983]
INFO:root:To access the Dataflow monitoring console, please navigate to
https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-07-21_17_45_32-15704347142994509983
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010842a:
2016-07-22T00:45:33.098Z: JOB_MESSAGE_DETAILED: (da22568e39a75de): Checking
required Cloud APIs are enabled.
INFO:root:Job 2016-07-21_17_45_32-15704347142994509983 is in state
JOB_STATE_RUNNING
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108847:
2016-07-22T00:45:34.151Z: JOB_MESSAGE_DEBUG: (da22568e39a72bf): Combiner
lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108849:
2016-07-22T00:45:34.153Z: JOB_MESSAGE_DETAILED: (da22568e39a7af1): Expanding
GroupByKey operations into optimizable parts.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010884c:
2016-07-22T00:45:34.156Z: JOB_MESSAGE_DETAILED: (da22568e39a7323): Lifting
ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108850:
2016-07-22T00:45:34.160Z: JOB_MESSAGE_DETAILED: (da22568e39a7387): Annotating
graph with Autotuner information.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010886d:
2016-07-22T00:45:34.189Z: JOB_MESSAGE_DETAILED: (da22568e39a744f): Fusing
adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108870:
2016-07-22T00:45:34.192Z: JOB_MESSAGE_DETAILED: (da22568e39a7c81): Fusing
consumer split into read
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108872:
2016-07-22T00:45:34.194Z: JOB_MESSAGE_DETAILED: (da22568e39a74b3): Fusing
consumer group/Reify into pair_with_one
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108874:
2016-07-22T00:45:34.196Z: JOB_MESSAGE_DETAILED: (da22568e39a7ce5): Fusing
consumer format into count
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108876:
2016-07-22T00:45:34.198Z: JOB_MESSAGE_DETAILED: (da22568e39a7517): Fusing
consumer count into group/GroupByWindow
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010887b:
2016-07-22T00:45:34.203Z: JOB_MESSAGE_DETAILED: (da22568e39a757b): Fusing
consumer pair_with_one into split
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010887d:
2016-07-22T00:45:34.205Z: JOB_MESSAGE_DETAILED: (da22568e39a7dad): Fusing
consumer group/GroupByWindow into group/Read
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108880:
2016-07-22T00:45:34.208Z: JOB_MESSAGE_DETAILED: (da22568e39a75df): Fusing
consumer write/WriteImpl/write_bundles into format
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108883:
2016-07-22T00:45:34.211Z: JOB_MESSAGE_DETAILED: (da22568e39a7e11): Fusing
consumer group/Write into group/Reify
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088b6:
2016-07-22T00:45:34.262Z: JOB_MESSAGE_DEBUG: (da22568e39a725d): Workflow config
is missing a default resource spec.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088b8:
2016-07-22T00:45:34.264Z: JOB_MESSAGE_DETAILED: (da22568e39a7a8f): Adding
StepResource setup and teardown to workflow graph.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088dc:
2016-07-22T00:45:34.300Z: JOB_MESSAGE_DEBUG: (d5c81a223fc34377): Adding
workflow start and stop steps.
INFO:root:2016-07-21_17_45_32-15704347142994509983_00000156101088e7:
2016-07-22T00:45:34.311Z: JOB_MESSAGE_DEBUG: (7667f0b2f9f0e688): Assigning
stage ids.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108947:
2016-07-22T00:45:34.407Z: JOB_MESSAGE_DEBUG: (c7b25d2b46bbb633): Executing wait
step start2
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108950:
2016-07-22T00:45:34.416Z: JOB_MESSAGE_DEBUG: (e35571d571f641c5): Executing
operation write/WriteImpl/DoOnce
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108953:
2016-07-22T00:45:34.419Z: JOB_MESSAGE_BASIC: S02: (9cd1197ee1aab0da): Executing
operation group/Create
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561010895c:
2016-07-22T00:45:34.428Z: JOB_MESSAGE_DEBUG: (d5c81a223fc34453): Value
"write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108966:
2016-07-22T00:45:34.438Z: JOB_MESSAGE_BASIC: S01: (6e24cb682a587192): Executing
operation write/WriteImpl/initialize_write
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a1f:
2016-07-22T00:45:34.623Z: JOB_MESSAGE_DEBUG: (8e74804a765e239f): Starting
worker pool setup.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a22:
2016-07-22T00:45:34.626Z: JOB_MESSAGE_BASIC: (8e74804a765e27d5): Starting 1
workers...
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a32:
2016-07-22T00:45:34.642Z: JOB_MESSAGE_DEBUG: (e35571d571f6462f): Value
"group/Session" materialized.
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610108a3c:
2016-07-22T00:45:34.652Z: JOB_MESSAGE_BASIC: S03: (592622882b970b48): Executing
operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:2016-07-21_17_45_32-15704347142994509983_0000015610114bda:
2016-07-22T00:46:24.218Z: JOB_MESSAGE_DETAILED: (ece1e6d99886c79e): Workers
have started successfully.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a09a:
2016-07-22T00:47:51.450Z: JOB_MESSAGE_ERROR: (89b961e010ca0d5e): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a4cb:
2016-07-22T00:47:52.523Z: JOB_MESSAGE_ERROR: (6fae018b96d726fe): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012a917:
2016-07-22T00:47:53.623Z: JOB_MESSAGE_ERROR: (812f831deabb914b): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b13c:
2016-07-22T00:47:55.708Z: JOB_MESSAGE_ERROR: (7dee846727b71bd2): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b57a:
2016-07-22T00:47:56.794Z: JOB_MESSAGE_ERROR: (22a33cb749f05a09): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9bb:
2016-07-22T00:47:57.883Z: JOB_MESSAGE_ERROR: (dd4fa0308c8ab691): Traceback
(most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9e4:
2016-07-22T00:47:57.924Z: JOB_MESSAGE_DEBUG: (6251030b36fbb057): Executing
failure step failure1
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012b9e7:
2016-07-22T00:47:57.927Z: JOB_MESSAGE_ERROR: (6251030b36fbb701): Workflow
failed. Causes: (592622882b970f63):
S03:read+split+pair_with_one+group/Reify+group/Write failed.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012ba22:
2016-07-22T00:47:57.986Z: JOB_MESSAGE_DETAILED: (6e8d0c8e21a631f4): Cleaning up.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012baa2:
2016-07-22T00:47:58.114Z: JOB_MESSAGE_DEBUG: (6e8d0c8e21a63dd7): Starting
worker pool teardown.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012baa5:
2016-07-22T00:47:58.117Z: JOB_MESSAGE_BASIC: (6e8d0c8e21a63b19): Stopping
worker pool...
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561012bdf3:
2016-07-22T00:47:58.963Z: JOB_MESSAGE_DETAILED: (c2d3412301a1c28c): Failed to
publish the result of the work update. Causes: (c2d3412301a1cc53): Failed to
update work status. Causes: (14133f7d32eb9074): Work "1446569712273821742" not
leased (or the lease was lost).
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561013f32c:
2016-07-22T00:49:18.124Z: JOB_MESSAGE_BASIC: (6e8d0c8e21a636fc): Worker pool
stopped.
INFO:root:2016-07-21_17_45_32-15704347142994509983_000001561013f78e:
2016-07-22T00:49:19.246Z: JOB_MESSAGE_DEBUG: (6e8d0c8e21a6326b): Tearing down
pending resources...
INFO:root:Job 2016-07-21_17_45_32-15704347142994509983 is in state
JOB_STATE_FAILED
Traceback (most recent call last):
File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
"__main__", fname, loader, pkg_name)
File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
exec code in run_globals
File
"<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>",
line 102, in <module>
run()
File
"<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>",
line 93, in run
result = p.run()
File "apache_beam/pipeline.py", line 159, in run
return self.runner.run(self)
File "apache_beam/runners/dataflow_runner.py", line 188, in run
% getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline
failed:
(dd4fa0308c8ab691): Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 462, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 891, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:24041)
op.start()
File "dataflow_worker/executor.py", line 477, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14398)
def start(self):
File "dataflow_worker/executor.py", line 508, in
dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14333)
self.dofn_runner.start()
File "apache_beam/runners/common.py", line 92, in
apache_beam.runners.common.DoFnRunner.start (apache_beam/runners/common.c:3281)
self.logging_context.exit()
AttributeError: 'PerThreadLoggingContext' object has no attribute 'exit'
# Grep will exit with status 1 if success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure