See
<https://builds.apache.org/job/beam_PostCommit_Python2/2455/display/redirect>
Changes:
------------------------------------------
[...truncated 11.72 MB...]
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1030, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 726, in
apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 812, in
apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1122, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1030, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.SimpleInvoker.invoke_process
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1122, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1045, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise_with_traceback(new_exn)
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 554, in
apache_beam.runners.common.SimpleInvoker.invoke_process
windowed_value, self.process_method(windowed_value.value))
File "https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/apache_beam/transforms/core.py",
line 1511, in <lambda>
File "https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/apache_beam/testing/util.py",
line 218, in _matches
File "/usr/local/lib/python2.7/site-packages/hamcrest/core/assert_that.py",
line 44, in assert_that
_assert_match(actual=arg1, matcher=arg2, reason=arg3)
File "/usr/local/lib/python2.7/site-packages/hamcrest/core/assert_that.py",
line 60, in _assert_match
raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence
containing 'dinosaur')] in any order
but: not matched: <[u'land vehicle', u'animal']> [while running
'assert_that/Match']
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:18.728Z:
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service
Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:19.942Z:
JOB_MESSAGE_ERROR: Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/dataflow_worker/batchworker.py",
line 647, in do_work
work_executor.execute()
File "/usr/local/lib/python2.7/site-packages/dataflow_worker/executor.py",
line 179, in execute
op.start()
File "dataflow_worker/shuffle_operations.py", line 63, in
dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
def start(self):
File "dataflow_worker/shuffle_operations.py", line 64, in
dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
with self.scoped_start_state:
File "dataflow_worker/shuffle_operations.py", line 79, in
dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
with self.scoped_process_state:
File "dataflow_worker/shuffle_operations.py", line 80, in
dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
with self.shuffle_source.reader() as reader:
File "dataflow_worker/shuffle_operations.py", line 84, in
dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
self.output(windowed_value)
File "apache_beam/runners/worker/operations.py", line 332, in
apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "dataflow_worker/shuffle_operations.py", line 261, in
dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
with self.scoped_process_state:
File "dataflow_worker/shuffle_operations.py", line 268, in
dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
self.output(wvalue.with_value((k, wvalue.value)))
File "apache_beam/runners/worker/operations.py", line 332, in
apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1030, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 726, in
apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 812, in
apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1122, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1030, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.SimpleInvoker.invoke_process
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1122, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 963, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1045, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise_with_traceback(new_exn)
File "apache_beam/runners/common.py", line 961, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 554, in
apache_beam.runners.common.SimpleInvoker.invoke_process
windowed_value, self.process_method(windowed_value.value))
File "https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/apache_beam/transforms/core.py",
line 1511, in <lambda>
File "https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/apache_beam/testing/util.py",
line 218, in _matches
File "/usr/local/lib/python2.7/site-packages/hamcrest/core/assert_that.py",
line 44, in assert_that
_assert_match(actual=arg1, matcher=arg2, reason=arg3)
File "/usr/local/lib/python2.7/site-packages/hamcrest/core/assert_that.py",
line 60, in _assert_match
raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence
containing 'dinosaur')] in any order
but: not matched: <[u'land vehicle', u'animal']> [while running
'assert_that/Match']
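For context, the mismatch above is raised by Beam's assert_that delegating to
a PyHamcrest matcher, per the apache_beam/testing/util.py and hamcrest frames
in the traceback. The following is a minimal illustrative sketch only, not the
failing integration test: the pipeline input, the expected labels, and the
helper name check_labels are assumptions. It shows the kind of check that
produces an "Expected: a sequence over [...] in any order / but: not matched"
description when the actual elements do not satisfy the matchers.

    # Illustrative sketch (assumed, not the actual IT test) of a Beam
    # assertion that delegates to PyHamcrest, mirroring the failure above.
    import apache_beam as beam
    from apache_beam.testing.test_pipeline import TestPipeline
    from apache_beam.testing.util import assert_that
    from hamcrest import all_of, contains_inanyorder, has_item
    from hamcrest import assert_that as hc_assert_that

    def check_labels(*matchers):
      # Callback for Beam's assert_that: 'actual' is the materialized list
      # of PCollection elements; a matcher mismatch raises the
      # AssertionError whose description appears (wrapped in RuntimeError)
      # in the log above.
      def _check(actual):
        hc_assert_that(actual, contains_inanyorder(*matchers))
      return _check

    with TestPipeline() as p:  # default runner unless test options are passed
      labels = p | beam.Create([['bicycle', 'dinosaur']])  # assumed input
      assert_that(
          labels,
          check_labels(all_of(has_item('bicycle'), has_item('dinosaur'))))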
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:19.972Z:
JOB_MESSAGE_BASIC: Finished operation
assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:20.054Z:
JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:20.087Z:
JOB_MESSAGE_ERROR: Workflow failed. Causes:
S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
failed., The job failed because a work item has failed 4 times. Look in
previous log entries for the cause of each one of the 4 failures. For more
information, see https://cloud.google.com/dataflow/docs/guides/common-errors.
The work item was attempted on these workers:
beamapp-jenkins-052300545-05221755-ohv0-harness-v5qx
Root cause: Work item failed.,
beamapp-jenkins-052300545-05221755-ohv0-harness-v5qx
Root cause: Work item failed.,
beamapp-jenkins-052300545-05221755-ohv0-harness-v5qx
Root cause: Work item failed.,
beamapp-jenkins-052300545-05221755-ohv0-harness-v5qx
Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:20.210Z:
JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:20.274Z:
JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:01:20.310Z:
JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:03:15.086Z:
JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:03:15.171Z:
JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T01:03:15.213Z:
JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job
2020-05-22_17_55_12-7921506914716703077 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML:
<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 4096.732s
FAILED (SKIP=7, errors=1)
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_05_01-10669922975433653050?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_19_13-14220984954577808014?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_26_32-8983555540577572720?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_34_37-14985489341674066386?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_42_49-12906121777497406901?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_50_33-9295203933448523443?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_59_37-10269173943879729121?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_55-2583716547645208123?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_25_12-2846202574006576682?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_33_07-17653689573562099737?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_40_44-6654408865288151884?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_48_15-831640217225427026?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_56_01-3408160779164993881?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_59-6353216595082362850?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_17_16-17820557282281737958?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_24_35-4123651466227733430?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_32_38-10486921442452600395?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_41_15-12885480244775489250?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_49_13-17514021816976327236?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_56_37-16867153970007412877?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_55-6999064063788253994?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_22_43-17419094300564939934?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_30_44-4331211443039866281?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_38_50-8708561895650192277?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_46_57-4375412445910963928?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_55_12-7921506914716703077?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_55-6325121463146186989?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_12_41-17267913760857115260?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_21_46-15734756527116577231?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_31_25-15873503542398117508?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_39_46-6383080705849242097?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_47_55-8110164905857297036?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_55_19-8768425437096272103?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_53-17628952830513868854?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_12_38-1128373212455224657?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_21_20-17221402520060312164?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_29_20-15074699397188943499?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_38_53-11060037279564697887?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_46_18-14900456389680013829?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_54_37-17048998676320450875?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_56-7277711632192251284?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_13_16-9123965449306998407?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_21_24-11908446410332451204?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_29_18-1967525118495189225?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_37_07-11613631498582385217?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_44_30-8800870532828191780?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_53_32-7997746799465210628?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_18_00_54-7248439131096382272?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_04_57-990132432700555909?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_14_35-15955685261552932746?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_24_34-2976282228749275728?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_32_52-16992557989805684154?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_40_18-18166872971325099063?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_17_57_33-11371612944972422744?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_18_05_12-16944706044351832552?project=apache-beam-testing
> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED
FAILURE: Build failed with an exception.
* Where:
Build file
'https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle'
line: 116
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 12m 17s
123 actionable tasks: 96 executed, 25 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/lsoeck3fhfiv4
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure