See <https://builds.apache.org/job/beam_PostCommit_Python36/2469/display/redirect>
Changes:

------------------------------------------
[...truncated 10.99 MB...]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.473Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError:
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.503Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.590Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.634Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  beamapp-jenkins-052306534-05222353-0g4j-harness-fg4p
      Root cause: Work item failed.,
  beamapp-jenkins-052306534-05222353-0g4j-harness-fg4p
      Root cause: Work item failed.,
  beamapp-jenkins-052306534-05222353-0g4j-harness-fg4p
      Root cause: Work item failed.,
  beamapp-jenkins-052306534-05222353-0g4j-harness-fg4p
      Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.767Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.846Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:00:36.890Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:01:57.662Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:01:57.712Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T07:01:57.752Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-22_23_53_56-14168537477151658312 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_32-15715489472025352244?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_16_52-15909221826794755275?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_25_01-9925200534854774285?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_32_49-2848303977523059469?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_41_19-12651236408886562488?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_49_23-6826284237852117144?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_57_01-2936611160050745797?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_29-10600709609441369604?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_23_31-12976641904586142498?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_31_24-1603736771339883236?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_39_39-14409326614358804791?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_47_38-4035063159179364422?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_55_19-7975445932843230184?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_29-12987662873339539912?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_14_36-7286583488790896310?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_22_32-17339828984822217635?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_30_55-9784439107599476683?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_39_53-16631532218763885203?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_47_27-6728397848428372909?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_55_53-859334866599255171?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_30-11780375380962844286?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_20_20-12679529587993982746?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_30_19-9846684886115176057?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_38_29-11530514650720886724?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_46_42-8511657846853678647?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_54_46-12776483298537916364?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_28-424858826504730209?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_11_11-2811633099804675020?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_20_07-8851708585642289520?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_28_24-10808957192077887759?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_36_41-5060628515654993064?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_44_55-13448417963116485569?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_53_10-8963147384749047765?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_28-11462927026785805262?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_10_56-17972614535691350738?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_19_21-12062338174407250461?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_26_56-4619671841013151683?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_34_17-7196516206620788821?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_41_56-4701126330820231951?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_50_53-1200230475509870406?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_59_27-12349008011648031607?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_28-13256095284965223919?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_11_45-10176274905757298073?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_20_39-4338372181045509970?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_28_58-7069597793635141129?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_37_10-4525175074354698041?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_45_25-16200982985192146094?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_53_56-14168537477151658312?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_02_31-17062022499025359433?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_12_37-15309417747571229877?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_23_12-3507884100540281641?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_31_05-3202458051014578044?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_39_24-3039195217575326089?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-22_23_56_46-15640166190684110119?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_00_04_35-6838506865577428240?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 4244.277s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 12m 21s

86 actionable tasks: 63 executed, 23 from cache

Publishing build scan...
https://gradle.com/s/k7zrofs6fcysc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
