See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/1387/display/redirect>
------------------------------------------
[...truncated 1.35 MB...]
File "dataflow_worker/opcounters.py", line 57, in
dataflow_worker.opcounters.OperationCounters.update_from
(dataflow_worker/opcounters.c:2396)
self.do_sample(windowed_value)
File "dataflow_worker/opcounters.py", line 75, in
dataflow_worker.opcounters.OperationCounters.do_sample
(dataflow_worker/opcounters.c:3017)
self.coder_impl.get_estimated_size_and_observables(windowed_value))
File "apache_beam/coders/coder_impl.py", line 695, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22894)
def get_estimated_size_and_observables(self, value, nested=False):
File "apache_beam/coders/coder_impl.py", line 704, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22613)
self._value_coder.get_estimated_size_and_observables(
File "apache_beam/coders/coder_impl.py", line 247, in
apache_beam.coders.coder_impl.FastPrimitivesCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:9564)
out = ByteCountingOutputStream()
File "apache_beam/coders/stream.pyx", line 28, in
apache_beam.coders.stream.OutputStream.__cinit__
(apache_beam/coders/stream.c:1241)
self.buffer_size = 1024
AttributeError: 'apache_beam.coders.stream.ByteCountingOutputStream' object has
no attribute 'buffer_size' [while running 'compute']
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b39674: 2017-02-28T17:10:23.604Z: JOB_MESSAGE_ERROR: (7c5adccc0c4a5beb): Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 544, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 971, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30533)
with op.scoped_metrics_container:
File "dataflow_worker/executor.py", line 972, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30481)
op.start()
File "dataflow_worker/executor.py", line 207, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8758)
def start(self):
File "dataflow_worker/executor.py", line 208, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8663)
with self.scoped_start_state:
File "dataflow_worker/executor.py", line 213, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8579)
with self.spec.source.reader() as reader:
File "dataflow_worker/executor.py", line 223, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8524)
self.output(windowed_value)
File "dataflow_worker/executor.py", line 151, in
dataflow_worker.executor.Operation.output (dataflow_worker/executor.c:6317)
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "dataflow_worker/executor.py", line 84, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:4021)
cython.cast(Operation, consumer).process(windowed_value)
File "dataflow_worker/executor.py", line 544, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18474)
with self.scoped_process_state:
File "dataflow_worker/executor.py", line 545, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18428)
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 195, in
apache_beam.runners.common.DoFnRunner.receive
(apache_beam/runners/common.c:5142)
self.process(windowed_value)
File "apache_beam/runners/common.py", line 267, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7201)
self.reraise_augmented(exn)
File "apache_beam/runners/common.py", line 279, in
apache_beam.runners.common.DoFnRunner.reraise_augmented
(apache_beam/runners/common.c:7590)
raise type(exn), args, sys.exc_info()[2]
File "apache_beam/runners/common.py", line 265, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7112)
self._dofn_invoker(element)
File "apache_beam/runners/common.py", line 232, in
apache_beam.runners.common.DoFnRunner._dofn_invoker
(apache_beam/runners/common.c:6131)
self._dofn_per_window_invoker(element)
File "apache_beam/runners/common.py", line 218, in
apache_beam.runners.common.DoFnRunner._dofn_per_window_invoker
(apache_beam/runners/common.c:5877)
self._process_outputs(element, self.dofn_process(*args))
File "apache_beam/runners/common.py", line 326, in
apache_beam.runners.common.DoFnRunner._process_outputs
(apache_beam/runners/common.c:8563)
self.main_receivers.receive(windowed_value)
File "dataflow_worker/executor.py", line 82, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:3987)
self.update_counters_start(windowed_value)
File "dataflow_worker/executor.py", line 88, in
dataflow_worker.executor.ConsumerSet.update_counters_start
(dataflow_worker/executor.c:4207)
self.opcounter.update_from(windowed_value)
File "dataflow_worker/opcounters.py", line 57, in
dataflow_worker.opcounters.OperationCounters.update_from
(dataflow_worker/opcounters.c:2396)
self.do_sample(windowed_value)
File "dataflow_worker/opcounters.py", line 75, in
dataflow_worker.opcounters.OperationCounters.do_sample
(dataflow_worker/opcounters.c:3017)
self.coder_impl.get_estimated_size_and_observables(windowed_value))
File "apache_beam/coders/coder_impl.py", line 695, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22894)
def get_estimated_size_and_observables(self, value, nested=False):
File "apache_beam/coders/coder_impl.py", line 704, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22613)
self._value_coder.get_estimated_size_and_observables(
File "apache_beam/coders/coder_impl.py", line 247, in
apache_beam.coders.coder_impl.FastPrimitivesCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:9564)
out = ByteCountingOutputStream()
File "apache_beam/coders/stream.pyx", line 28, in
apache_beam.coders.stream.OutputStream.__cinit__
(apache_beam/coders/stream.c:1241)
self.buffer_size = 1024
AttributeError: 'apache_beam.coders.stream.ByteCountingOutputStream' object has
no attribute 'buffer_size' [while running 'compute']
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3a2d8: 2017-02-28T17:10:26.776Z: JOB_MESSAGE_ERROR: (7c5adccc0c4a5f58): Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 544, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 971, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30533)
with op.scoped_metrics_container:
File "dataflow_worker/executor.py", line 972, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30481)
op.start()
File "dataflow_worker/executor.py", line 207, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8758)
def start(self):
File "dataflow_worker/executor.py", line 208, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8663)
with self.scoped_start_state:
File "dataflow_worker/executor.py", line 213, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8579)
with self.spec.source.reader() as reader:
File "dataflow_worker/executor.py", line 223, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8524)
self.output(windowed_value)
File "dataflow_worker/executor.py", line 151, in
dataflow_worker.executor.Operation.output (dataflow_worker/executor.c:6317)
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "dataflow_worker/executor.py", line 84, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:4021)
cython.cast(Operation, consumer).process(windowed_value)
File "dataflow_worker/executor.py", line 544, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18474)
with self.scoped_process_state:
File "dataflow_worker/executor.py", line 545, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18428)
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 195, in
apache_beam.runners.common.DoFnRunner.receive
(apache_beam/runners/common.c:5142)
self.process(windowed_value)
File "apache_beam/runners/common.py", line 267, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7201)
self.reraise_augmented(exn)
File "apache_beam/runners/common.py", line 279, in
apache_beam.runners.common.DoFnRunner.reraise_augmented
(apache_beam/runners/common.c:7590)
raise type(exn), args, sys.exc_info()[2]
File "apache_beam/runners/common.py", line 265, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7112)
self._dofn_invoker(element)
File "apache_beam/runners/common.py", line 232, in
apache_beam.runners.common.DoFnRunner._dofn_invoker
(apache_beam/runners/common.c:6131)
self._dofn_per_window_invoker(element)
File "apache_beam/runners/common.py", line 218, in
apache_beam.runners.common.DoFnRunner._dofn_per_window_invoker
(apache_beam/runners/common.c:5877)
self._process_outputs(element, self.dofn_process(*args))
File "apache_beam/runners/common.py", line 326, in
apache_beam.runners.common.DoFnRunner._process_outputs
(apache_beam/runners/common.c:8563)
self.main_receivers.receive(windowed_value)
File "dataflow_worker/executor.py", line 82, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:3987)
self.update_counters_start(windowed_value)
File "dataflow_worker/executor.py", line 88, in
dataflow_worker.executor.ConsumerSet.update_counters_start
(dataflow_worker/executor.c:4207)
self.opcounter.update_from(windowed_value)
File "dataflow_worker/opcounters.py", line 57, in
dataflow_worker.opcounters.OperationCounters.update_from
(dataflow_worker/opcounters.c:2396)
self.do_sample(windowed_value)
File "dataflow_worker/opcounters.py", line 75, in
dataflow_worker.opcounters.OperationCounters.do_sample
(dataflow_worker/opcounters.c:3017)
self.coder_impl.get_estimated_size_and_observables(windowed_value))
File "apache_beam/coders/coder_impl.py", line 695, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22894)
def get_estimated_size_and_observables(self, value, nested=False):
File "apache_beam/coders/coder_impl.py", line 704, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22613)
self._value_coder.get_estimated_size_and_observables(
File "apache_beam/coders/coder_impl.py", line 247, in
apache_beam.coders.coder_impl.FastPrimitivesCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:9564)
out = ByteCountingOutputStream()
File "apache_beam/coders/stream.pyx", line 28, in
apache_beam.coders.stream.OutputStream.__cinit__
(apache_beam/coders/stream.c:1241)
self.buffer_size = 1024
AttributeError: 'apache_beam.coders.stream.ByteCountingOutputStream' object has
no attribute 'buffer_size' [while running 'compute']
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3af41: 2017-02-28T17:10:29.953Z: JOB_MESSAGE_ERROR: (7c5adccc0c4a52c5): Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 544, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 971, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30533)
with op.scoped_metrics_container:
File "dataflow_worker/executor.py", line 972, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30481)
op.start()
File "dataflow_worker/executor.py", line 207, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8758)
def start(self):
File "dataflow_worker/executor.py", line 208, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8663)
with self.scoped_start_state:
File "dataflow_worker/executor.py", line 213, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8579)
with self.spec.source.reader() as reader:
File "dataflow_worker/executor.py", line 223, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8524)
self.output(windowed_value)
File "dataflow_worker/executor.py", line 151, in
dataflow_worker.executor.Operation.output (dataflow_worker/executor.c:6317)
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "dataflow_worker/executor.py", line 84, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:4021)
cython.cast(Operation, consumer).process(windowed_value)
File "dataflow_worker/executor.py", line 544, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18474)
with self.scoped_process_state:
File "dataflow_worker/executor.py", line 545, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18428)
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 195, in
apache_beam.runners.common.DoFnRunner.receive
(apache_beam/runners/common.c:5142)
self.process(windowed_value)
File "apache_beam/runners/common.py", line 267, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7201)
self.reraise_augmented(exn)
File "apache_beam/runners/common.py", line 279, in
apache_beam.runners.common.DoFnRunner.reraise_augmented
(apache_beam/runners/common.c:7590)
raise type(exn), args, sys.exc_info()[2]
File "apache_beam/runners/common.py", line 265, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7112)
self._dofn_invoker(element)
File "apache_beam/runners/common.py", line 232, in
apache_beam.runners.common.DoFnRunner._dofn_invoker
(apache_beam/runners/common.c:6131)
self._dofn_per_window_invoker(element)
File "apache_beam/runners/common.py", line 218, in
apache_beam.runners.common.DoFnRunner._dofn_per_window_invoker
(apache_beam/runners/common.c:5877)
self._process_outputs(element, self.dofn_process(*args))
File "apache_beam/runners/common.py", line 326, in
apache_beam.runners.common.DoFnRunner._process_outputs
(apache_beam/runners/common.c:8563)
self.main_receivers.receive(windowed_value)
File "dataflow_worker/executor.py", line 82, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:3987)
self.update_counters_start(windowed_value)
File "dataflow_worker/executor.py", line 88, in
dataflow_worker.executor.ConsumerSet.update_counters_start
(dataflow_worker/executor.c:4207)
self.opcounter.update_from(windowed_value)
File "dataflow_worker/opcounters.py", line 57, in
dataflow_worker.opcounters.OperationCounters.update_from
(dataflow_worker/opcounters.c:2396)
self.do_sample(windowed_value)
File "dataflow_worker/opcounters.py", line 75, in
dataflow_worker.opcounters.OperationCounters.do_sample
(dataflow_worker/opcounters.c:3017)
self.coder_impl.get_estimated_size_and_observables(windowed_value))
File "apache_beam/coders/coder_impl.py", line 695, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22894)
def get_estimated_size_and_observables(self, value, nested=False):
File "apache_beam/coders/coder_impl.py", line 704, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22613)
self._value_coder.get_estimated_size_and_observables(
File "apache_beam/coders/coder_impl.py", line 247, in
apache_beam.coders.coder_impl.FastPrimitivesCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:9564)
out = ByteCountingOutputStream()
File "apache_beam/coders/stream.pyx", line 28, in
apache_beam.coders.stream.OutputStream.__cinit__
(apache_beam/coders/stream.c:1241)
self.buffer_size = 1024
AttributeError: 'apache_beam.coders.stream.ByteCountingOutputStream' object has
no attribute 'buffer_size' [while running 'compute']
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bbc3: 2017-02-28T17:10:33.155Z: JOB_MESSAGE_ERROR: (7c5adccc0c4a5632): Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py",
line 544, in do_work
work_executor.execute()
File "dataflow_worker/executor.py", line 971, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30533)
with op.scoped_metrics_container:
File "dataflow_worker/executor.py", line 972, in
dataflow_worker.executor.MapTaskExecutor.execute
(dataflow_worker/executor.c:30481)
op.start()
File "dataflow_worker/executor.py", line 207, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8758)
def start(self):
File "dataflow_worker/executor.py", line 208, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8663)
with self.scoped_start_state:
File "dataflow_worker/executor.py", line 213, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8579)
with self.spec.source.reader() as reader:
File "dataflow_worker/executor.py", line 223, in
dataflow_worker.executor.ReadOperation.start (dataflow_worker/executor.c:8524)
self.output(windowed_value)
File "dataflow_worker/executor.py", line 151, in
dataflow_worker.executor.Operation.output (dataflow_worker/executor.c:6317)
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "dataflow_worker/executor.py", line 84, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:4021)
cython.cast(Operation, consumer).process(windowed_value)
File "dataflow_worker/executor.py", line 544, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18474)
with self.scoped_process_state:
File "dataflow_worker/executor.py", line 545, in
dataflow_worker.executor.DoOperation.process (dataflow_worker/executor.c:18428)
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 195, in
apache_beam.runners.common.DoFnRunner.receive
(apache_beam/runners/common.c:5142)
self.process(windowed_value)
File "apache_beam/runners/common.py", line 267, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7201)
self.reraise_augmented(exn)
File "apache_beam/runners/common.py", line 279, in
apache_beam.runners.common.DoFnRunner.reraise_augmented
(apache_beam/runners/common.c:7590)
raise type(exn), args, sys.exc_info()[2]
File "apache_beam/runners/common.py", line 265, in
apache_beam.runners.common.DoFnRunner.process
(apache_beam/runners/common.c:7112)
self._dofn_invoker(element)
File "apache_beam/runners/common.py", line 232, in
apache_beam.runners.common.DoFnRunner._dofn_invoker
(apache_beam/runners/common.c:6131)
self._dofn_per_window_invoker(element)
File "apache_beam/runners/common.py", line 218, in
apache_beam.runners.common.DoFnRunner._dofn_per_window_invoker
(apache_beam/runners/common.c:5877)
self._process_outputs(element, self.dofn_process(*args))
File "apache_beam/runners/common.py", line 326, in
apache_beam.runners.common.DoFnRunner._process_outputs
(apache_beam/runners/common.c:8563)
self.main_receivers.receive(windowed_value)
File "dataflow_worker/executor.py", line 82, in
dataflow_worker.executor.ConsumerSet.receive (dataflow_worker/executor.c:3987)
self.update_counters_start(windowed_value)
File "dataflow_worker/executor.py", line 88, in
dataflow_worker.executor.ConsumerSet.update_counters_start
(dataflow_worker/executor.c:4207)
self.opcounter.update_from(windowed_value)
File "dataflow_worker/opcounters.py", line 57, in
dataflow_worker.opcounters.OperationCounters.update_from
(dataflow_worker/opcounters.c:2396)
self.do_sample(windowed_value)
File "dataflow_worker/opcounters.py", line 75, in
dataflow_worker.opcounters.OperationCounters.do_sample
(dataflow_worker/opcounters.c:3017)
self.coder_impl.get_estimated_size_and_observables(windowed_value))
File "apache_beam/coders/coder_impl.py", line 695, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22894)
def get_estimated_size_and_observables(self, value, nested=False):
File "apache_beam/coders/coder_impl.py", line 704, in
apache_beam.coders.coder_impl.WindowedValueCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:22613)
self._value_coder.get_estimated_size_and_observables(
File "apache_beam/coders/coder_impl.py", line 247, in
apache_beam.coders.coder_impl.FastPrimitivesCoderImpl.get_estimated_size_and_observables
(apache_beam/coders/coder_impl.c:9564)
out = ByteCountingOutputStream()
File "apache_beam/coders/stream.pyx", line 28, in
apache_beam.coders.stream.OutputStream.__cinit__
(apache_beam/coders/stream.c:1241)
self.buffer_size = 1024
AttributeError: 'apache_beam.coders.stream.ByteCountingOutputStream' object has
no attribute 'buffer_size' [while running 'compute']
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bc4c: 2017-02-28T17:10:33.292Z: JOB_MESSAGE_DEBUG: (d7330d0acbfd8f1a): Executing failure step failure12
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bc4f: 2017-02-28T17:10:33.295Z: JOB_MESSAGE_ERROR: (d7330d0acbfd8440): Workflow failed. Causes: (d7330d0acbfd8fa8): S03:compute+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/Reify+assert_that/Group/Write failed.
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bc8b: 2017-02-28T17:10:33.355Z: JOB_MESSAGE_DETAILED: (4c676cefa2ab401f): Cleaning up.
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bd09: 2017-02-28T17:10:33.481Z: JOB_MESSAGE_DEBUG: (4c676cefa2ab4429): Starting worker pool teardown.
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b3bd0b: 2017-02-28T17:10:33.483Z: JOB_MESSAGE_BASIC: (4c676cefa2ab4833): Stopping worker pool...
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b4ce82: 2017-02-28T17:11:43.490Z: JOB_MESSAGE_BASIC: (4c676cefa2ab4e42): Worker pool stopped.
root: INFO: 2017-02-28_09_06_17-7834325279453351626_0000015a85b4cea5: 2017-02-28T17:11:43.525Z: JOB_MESSAGE_DEBUG: (4c676cefa2ab4a60): Tearing down pending resources...
root: INFO: Job 2017-02-28_09_06_17-7834325279453351626 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
Ran 14 tests in 4319.139s
FAILED (errors=13)
Build step 'Execute shell' marked build as failure