See <https://builds.apache.org/job/beam_PostCommit_PythonVerify/566/>
------------------------------------------
[...truncated 2850 lines...]
} ], "is_wrapper": true } ] }, "output_name": "out", "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|FlatMap(<lambda at iobase.py:758>).None)/CreatePCollectionView.out" } ], "parallel_input": { "@type": "OutputReference", "output_name": "out", "step_name": "s14" }, "user_name": "write/WriteImpl/ViewAsIterable(write|WriteImpl|FlatMap(<lambda at iobase.py:758>).None)/CreatePCollectionView" } },
{ "kind": "ParallelDo", "name": "s16", "properties": { "non_parallel_inputs": { "s15": { "@type": "OutputReference", "output_name": "out", "step_name": "s15" }, "s9": { "@type": "OutputReference", "output_name": "out", "step_name": "s9" } }, "output_info": [ { "encoding": { "@type": "WindowedValueCoder$<string of 408 bytes>", "component_encodings": [ { "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", "component_encodings": [ { "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", "component_encodings": [] }, { "@type": "FastPrimitivesCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/", "component_encodings": [] } ], "is_pair_like": true }, { "@type": "TimestampCoder$eJxrYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlwhmbmpxSWJuQXOID5XIYNmYyFjbSFTkh4ANWETWg==", "component_encodings": [] }, { "@type": "SingletonCoder$<string of 252 bytes>", "component_encodings": [] } ], "is_wrapper": true }, "output_name": "out", "user_name": "write/WriteImpl/finalize_write.out" } ], "parallel_input": { "@type": "OutputReference", "output_name": "out", "step_name": "s7" }, "serialized_fn": "<string of 1292 bytes>", "user_name": "write/WriteImpl/finalize_write" } } ],
"type": "JOB_TYPE_BATCH" }
INFO:root:Create job: <Job id: u'2016-10-18_14_04_17-7862377147606629765' projectId: u'apache-beam-testing' steps: [] tempFiles: [] type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:root:Created job with id: [2016-10-18_14_04_17-7862377147606629765]
INFO:root:To access the Dataflow monitoring console, please navigate to https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2016-10-18_14_04_17-7862377147606629765
INFO:root:Job 2016-10-18_14_04_17-7862377147606629765 is in state JOB_STATE_RUNNING
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf356: 2016-10-18T21:04:18.774Z: JOB_MESSAGE_DETAILED: (2787e482e4005483): Checking required Cloud APIs are enabled.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf467: 2016-10-18T21:04:19.047Z: JOB_MESSAGE_DEBUG: (2787e482e4005f84): Combiner lifting skipped for step write/WriteImpl/GroupByKey: GroupByKey not followed by a combiner.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf46a: 2016-10-18T21:04:19.050Z: JOB_MESSAGE_DEBUG: (2787e482e40051f6): Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf46c: 2016-10-18T21:04:19.052Z: JOB_MESSAGE_DETAILED: (2787e482e4005468): Expanding GroupByKey operations into optimizable parts.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf46f: 2016-10-18T21:04:19.055Z: JOB_MESSAGE_DETAILED: (2787e482e40056da): Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf476: 2016-10-18T21:04:19.062Z: JOB_MESSAGE_DETAILED: (2787e482e4005e30): Annotating graph with Autotuner information.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf497: 2016-10-18T21:04:19.095Z: JOB_MESSAGE_DETAILED: (2787e482e40057f8): Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf49b: 2016-10-18T21:04:19.099Z: JOB_MESSAGE_DETAILED: (2787e482e4005a6a): Fusing consumer split into read
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf49d: 2016-10-18T21:04:19.101Z: JOB_MESSAGE_DETAILED: (2787e482e4005cdc): Fusing consumer group/Reify into pair_with_one
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4a0: 2016-10-18T21:04:19.104Z: JOB_MESSAGE_DETAILED: (2787e482e4005f4e): Fusing consumer format into count
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4a2: 2016-10-18T21:04:19.106Z: JOB_MESSAGE_DETAILED: (2787e482e40051c0): Fusing consumer write/WriteImpl/GroupByKey/GroupByWindow into write/WriteImpl/GroupByKey/Read
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4a4: 2016-10-18T21:04:19.108Z: JOB_MESSAGE_DETAILED: (2787e482e4005432): Fusing consumer write/WriteImpl/GroupByKey/Write into write/WriteImpl/GroupByKey/Reify
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4a9: 2016-10-18T21:04:19.113Z: JOB_MESSAGE_DETAILED: (2787e482e4005916): Fusing consumer write/WriteImpl/FlatMap(<lambda at iobase.py:758>) into write/WriteImpl/GroupByKey/GroupByWindow
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4b0: 2016-10-18T21:04:19.120Z: JOB_MESSAGE_DETAILED: (2787e482e4005b88): Fusing consumer count into group/GroupByWindow
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4b2: 2016-10-18T21:04:19.122Z: JOB_MESSAGE_DETAILED: (2787e482e4005dfa): Fusing consumer write/WriteImpl/WindowInto into write/WriteImpl/Map(<lambda at iobase.py:755>)
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4b5: 2016-10-18T21:04:19.125Z: JOB_MESSAGE_DETAILED: (2787e482e400506c): Fusing consumer write/WriteImpl/GroupByKey/Reify into write/WriteImpl/WindowInto
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4b7: 2016-10-18T21:04:19.127Z: JOB_MESSAGE_DETAILED: (2787e482e40052de): Fusing consumer write/WriteImpl/Map(<lambda at iobase.py:755>) into write/WriteImpl/write_bundles
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4b9: 2016-10-18T21:04:19.129Z: JOB_MESSAGE_DETAILED: (2787e482e4005550): Fusing consumer pair_with_one into split
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4bc: 2016-10-18T21:04:19.132Z: JOB_MESSAGE_DETAILED: (2787e482e40057c2): Fusing consumer group/GroupByWindow into group/Read
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4be: 2016-10-18T21:04:19.134Z: JOB_MESSAGE_DETAILED: (2787e482e4005a34): Fusing consumer write/WriteImpl/write_bundles into format
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf4c0: 2016-10-18T21:04:19.136Z: JOB_MESSAGE_DETAILED: (2787e482e4005ca6): Fusing consumer group/Write into group/Reify
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf505: 2016-10-18T21:04:19.205Z: JOB_MESSAGE_DEBUG: (2787e482e4005ae6): Workflow config is missing a default resource spec.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf507: 2016-10-18T21:04:19.207Z: JOB_MESSAGE_DETAILED: (2787e482e4005d58): Adding StepResource setup and teardown to workflow graph.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf524: 2016-10-18T21:04:19.236Z: JOB_MESSAGE_DEBUG: (6ea0de46e224f918): Adding workflow start and stop steps.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf550: 2016-10-18T21:04:19.280Z: JOB_MESSAGE_DEBUG: (afad86e2b9352ea0): Assigning stage ids.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf5be: 2016-10-18T21:04:19.390Z: JOB_MESSAGE_DEBUG: (6ea0de46e224f987): Executing wait step start2
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf5c8: 2016-10-18T21:04:19.400Z: JOB_MESSAGE_BASIC: S01: (2de4ec76a3165b6d): Executing operation group/Create
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf5cb: 2016-10-18T21:04:19.403Z: JOB_MESSAGE_DEBUG: (8491705b90a34fbd): Executing operation write/WriteImpl/DoOnce
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf5e5: 2016-10-18T21:04:19.429Z: JOB_MESSAGE_DEBUG: (82f4625d69dc6081): Value "write/WriteImpl/DoOnce.out" materialized.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf5f0: 2016-10-18T21:04:19.440Z: JOB_MESSAGE_BASIC: S04: (5f34236610ccc0e1): Executing operation write/WriteImpl/initialize_write
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf694: 2016-10-18T21:04:19.604Z: JOB_MESSAGE_DEBUG: (ef48b2d9a419ccf0): Starting worker pool setup.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf696: 2016-10-18T21:04:19.606Z: JOB_MESSAGE_BASIC: (ef48b2d9a419ca3a): Starting 1 workers...
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf6aa: 2016-10-18T21:04:19.626Z: JOB_MESSAGE_DEBUG: (28ca00b690721d4d): Value "group/Session" materialized.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99bf6e0: 2016-10-18T21:04:19.680Z: JOB_MESSAGE_BASIC: S02: (4e0f72cd0696e1d4): Executing operation read+split+pair_with_one+group/Reify+group/Write
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99d7f14: 2016-10-18T21:06:00.084Z: JOB_MESSAGE_DETAILED: (474c745854a5aa85): Workers have started successfully.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e3f60: 2016-10-18T21:06:49.312Z: JOB_MESSAGE_DEBUG: (2787e482e40054ae): Value "write/WriteImpl/initialize_write.out" materialized.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e3f6a: 2016-10-18T21:06:49.322Z: JOB_MESSAGE_BASIC: S05: (4333f43c84da33bd): Executing operation write/WriteImpl/ViewAsSingleton(write|WriteImpl|initialize_write.None)/CreatePCollectionView
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e3f9b: 2016-10-18T21:06:49.371Z: JOB_MESSAGE_DEBUG: (2787e482e4005c04): Value "write/WriteImpl/ViewAsSingleton(write|WriteImpl|initialize_write.None)/CreatePCollectionView.out" materialized.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4622: 2016-10-18T21:06:51.042Z: JOB_MESSAGE_BASIC: S03: (2de4ec76a3165674): Executing operation group/Close
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e463a: 2016-10-18T21:06:51.066Z: JOB_MESSAGE_BASIC: S06: (82f4625d69dc6479): Executing operation write/WriteImpl/GroupByKey/Create
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e46ef: 2016-10-18T21:06:51.247Z: JOB_MESSAGE_DEBUG: (4333f43c84da3c3f): Value "write/WriteImpl/GroupByKey/Session" materialized.
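The step names being fused and executed above (read, split, pair_with_one, group, count, format, and the write/WriteImpl steps) come from the wordcount example this post-commit runs (apache_beam/examples/wordcount.py, per the traceback further down). For orientation only, the pipeline's shape is roughly the sketch below; it is written against the current Beam Python API rather than the 2016-era SDK in this log, and the GCS paths are placeholders:

    import re
    import apache_beam as beam

    # Hypothetical input/output locations; the real test uses its own GCS paths.
    INPUT = 'gs://example-bucket/input.txt'
    OUTPUT = 'gs://example-bucket/counts'

    with beam.Pipeline() as p:
        (p
         | 'read' >> beam.io.ReadFromText(INPUT)
         | 'split' >> beam.FlatMap(lambda line: re.findall(r"[\w']+", line))
         | 'pair_with_one' >> beam.Map(lambda word: (word, 1))
         | 'group' >> beam.GroupByKey()
         | 'count' >> beam.Map(lambda kv: (kv[0], sum(kv[1])))
         | 'format' >> beam.Map(lambda kv: '%s: %d' % kv)
         | 'write' >> beam.io.WriteToText(OUTPUT))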
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e46fc: 2016-10-18T21:06:51.260Z: JOB_MESSAGE_BASIC: S07: (82f4625d69dc6f73): Executing operation group/Read+group/GroupByWindow+count+format+write/WriteImpl/write_bundles+write/WriteImpl/Map(<lambda at iobase.py:755>)+write/WriteImpl/WindowInto+write/WriteImpl/GroupByKey/Reify+write/WriteImpl/GroupByKey/Write
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e48a0: 2016-10-18T21:06:51.680Z: JOB_MESSAGE_ERROR: (3140c949fe86037b): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4972: 2016-10-18T21:06:51.890Z: JOB_MESSAGE_ERROR: (3140c949fe860af4): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4a16: 2016-10-18T21:06:52.054Z: JOB_MESSAGE_ERROR: (3140c949fe86026d): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4ae3: 2016-10-18T21:06:52.259Z: JOB_MESSAGE_ERROR: (3140c949fe860ef1): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4b8e: 2016-10-18T21:06:52.430Z: JOB_MESSAGE_ERROR: (3140c949fe86066a): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4bb6: 2016-10-18T21:06:52.470Z: JOB_MESSAGE_DEBUG: (afad86e2b9352547): Executing failure step failure1
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4bb8: 2016-10-18T21:06:52.472Z: JOB_MESSAGE_ERROR: (afad86e2b9352fa5): Workflow failed. Causes: (82f4625d69dc6061): S07:group/Read+group/GroupByWindow+count+format+write/WriteImpl/write_bundles+write/WriteImpl/Map(<lambda at iobase.py:755>)+write/WriteImpl/WindowInto+write/WriteImpl/GroupByKey/Reify+write/WriteImpl/GroupByKey/Write failed.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4bec: 2016-10-18T21:06:52.524Z: JOB_MESSAGE_DETAILED: (5ca87e55b6c649af): Cleaning up.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4c73: 2016-10-18T21:06:52.659Z: JOB_MESSAGE_DEBUG: (5ca87e55b6c645a8): Starting worker pool teardown.
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99e4c76: 2016-10-18T21:06:52.662Z: JOB_MESSAGE_BASIC: (5ca87e55b6c6484e): Stopping worker pool...
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99f84fd: 2016-10-18T21:08:12.669Z: JOB_MESSAGE_BASIC: (5ca87e55b6c64447): Worker pool stopped.
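Each of the failed work items above dies on the same worker line: _read_side_inputs unpacks view_options into a (has_default, default) pair, and Python 2 raises "need more than 1 value to unpack" when the iterable it is given holds only a single item. A minimal, self-contained sketch of that failure mode (the one-element tuple below is hypothetical, chosen only to reproduce the message, not the actual value the worker received):

    # Hypothetical stand-in for the side-input view_options the worker read;
    # the code expects a (has_default, default) pair but gets one element.
    view_options = (False,)

    try:
        has_default, default = view_options
    except ValueError as err:
        # On Python 2.7 this prints: need more than 1 value to unpack
        print(err)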
INFO:root:2016-10-18_14_04_17-7862377147606629765_00000157d99f88f9: 2016-10-18T21:08:13.689Z: JOB_MESSAGE_DEBUG: (5ca87e55b6c642e6): Tearing down pending resources...
INFO:root:Job 2016-10-18_14_04_17-7862377147606629765 is in state JOB_STATE_FAILED
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 162, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>", line 107, in <module>
    run()
  File "<https://builds.apache.org/job/beam_PostCommit_PythonVerify/ws/sdks/python/apache_beam/examples/wordcount.py>", line 98, in run
    result = p.run()
  File "apache_beam/pipeline.py", line 159, in run
    return self.runner.run(self)
  File "apache_beam/runners/dataflow_runner.py", line 188, in run
    % getattr(self, 'last_error_msg', None), self.result)
apache_beam.runners.dataflow_runner.DataflowRuntimeException: Dataflow pipeline failed: (3140c949fe86066a): Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 474, in do_work
    work_executor.execute()
  File "dataflow_worker/executor.py", line 909, in dataflow_worker.executor.MapTaskExecutor.execute (dataflow_worker/executor.c:24416)
    op.start()
  File "dataflow_worker/executor.py", line 473, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14278)
    def start(self):
  File "dataflow_worker/executor.py", line 500, in dataflow_worker.executor.DoOperation.start (dataflow_worker/executor.c:14093)
    self.dofn_runner = common.DoFnRunner(
  File "apache_beam/runners/common.py", line 84, in apache_beam.runners.common.DoFnRunner.__init__ (apache_beam/runners/common.c:3292)
    for side_input in side_inputs]
  File "dataflow_worker/executor.py", line 446, in _read_side_inputs (dataflow_worker/executor.c:12804)
    has_default, default = view_options
ValueError: need more than 1 value to unpack
# Grep will exit with status 1 if success message was not found.
echo ">>> CHECKING JOB SUCCESS"
>>> CHECKING JOB SUCCESS
grep JOB_STATE_DONE job_output
Build step 'Execute shell' marked build as failure
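For reference, the pass/fail decision at the end of the log is just a text search: the wrapper script greps the captured runner output for JOB_STATE_DONE and lets grep's exit status decide the build result. A rough Python equivalent of that check (only the job_output filename is taken from the grep invocation above; the rest is an assumption):

    import sys

    # Mirrors `grep JOB_STATE_DONE job_output`: exit non-zero when the
    # success marker never appeared, which is what fails this build.
    with open('job_output') as f:
        found = any('JOB_STATE_DONE' in line for line in f)

    sys.exit(0 if found else 1)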