See <https://builds.apache.org/job/beam_PostCommit_Python36/2465/display/redirect?page=changes>
Changes:
[daniel.o.programmer] [BEAM-9978] Adding functionality and tests to Go offset range tracker.
------------------------------------------
[...truncated 10.99 MB...]
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:21.982Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']
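[Editor's note] The AssertionError above is raised by the matcher at apache_beam/testing/util.py:218 (_matches), which hands the expected values to PyHamcrest's contains_inanyorder. The following is a minimal sketch only, under the assumption that the failing test uses apache_beam.testing.util.matches_all with hamcrest matchers (consistent with the _matches frame); the element values and matchers are illustrative, since the actual test code is not shown in this log:

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, matches_all
from hamcrest import all_of, has_item

# Assumption: the pipeline under test emits list-of-string elements; the
# value below is illustrative and stands in for whatever it actually produced.
with TestPipeline() as p:
    output = p | beam.Create([['animal', 'land vehicle']])
    # matches_all delegates to hamcrest's contains_inanyorder (the
    # testing/util.py:218 frame above), so an element missing 'bicycle' or
    # 'dinosaur' fails with the same "a sequence containing ..." description.
    assert_that(
        output,
        matches_all([all_of(has_item('bicycle'), has_item('dinosaur'))]))

Run locally on the DirectRunner, a sketch of this shape raises the same "Expected: a sequence over [...] in any order / but: not matched" AssertionError, which the Dataflow worker surfaces above wrapped in a RuntimeError with the step name appended.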
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.011Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.091Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.129Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  beamapp-jenkins-052206550-05212355-1ism-harness-m1lw
      Root cause: Work item failed.,
  beamapp-jenkins-052206550-05212355-1ism-harness-m1lw
      Root cause: Work item failed.,
  beamapp-jenkins-052206550-05212355-1ism-harness-m1lw
      Root cause: Work item failed.,
  beamapp-jenkins-052206550-05212355-1ism-harness-m1lw
      Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.258Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.316Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:02:22.350Z: JOB_MESSAGE_BASIC: Stopping worker pool...
oauth2client.transport: INFO: Refreshing due to a 401 (attempt 1/2)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:03:54.038Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:03:54.149Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T07:03:54.184Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-21_23_55_19-8518556835570965574 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_33-11966027546088852639?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_16_56-7927781072652341814?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_25_04-14136690683562096595?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_33_10-7541880915743195272?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_41_41-18303213432286457255?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_49_37-18252402953818368776?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_57_52-11116582147016610848?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_28-18276746257205741566?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_25_09-2013079730589240701?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_33_49-13914489178249810154?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_42_41-2709265374187740011?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_52_27-1555098218360286662?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_59_55-12128656151264967336?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_29-18325383591300580170?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_15_12-1188017314050293847?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_23_23-14475852562490374536?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_32_10-18109464549489390464?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_41_43-1014722812482182677?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_49_46-16864441104293008029?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_58_11-10020119010773041585?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_00_06_07-16275318326965707585?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_27-9999263049389615660?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_20_43-6107862619803865216?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_29_21-1623267390052350998?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_37_58-10530580434876334421?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_46_39-8411568483141253308?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_55_19-8518556835570965574?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_29-3646216528281297655?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_11_40-1067272507914712844?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_20_30-8828203358883152183?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_30_12-3415267002029596251?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_39_27-17928027751419651647?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_56_39-16831733050257016172?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_29-12095901082738814051?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_11_34-109019688557727001?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_20_24-13053885047174275153?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_28_49-9319590615480128855?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_37_15-14013785701969521085?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_44_50-10458253190527521387?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_52_50-14083948259050546802?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_27-7248813184783233293?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_10_48-3188502047657209462?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_19_23-1849523543990570840?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_27_49-11448766124895475689?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_35_57-7123510371596717407?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_44_12-15649544748383839359?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_52_11-402883564824704193?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_00_00_01-13036310211579953937?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_02_28-15938622827523907355?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_12_29-6437885433184863248?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_22_43-13397578492157116655?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_31_07-10789006728141228143?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_39_04-6961710661494357752?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_47_49-6087518936364888347?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_23_56_04-14618385644888291323?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 4327.563s
FAILED (SKIP=7, errors=1)
> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED
FAILURE: Build failed with an exception.
* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 13m 47s
86 actionable tasks: 63 executed, 23 from cache
Publishing build scan...
https://gradle.com/s/6giorn45f37sc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure