See <https://builds.apache.org/job/beam_PostCommit_Python37/2387/display/redirect>
Changes:


------------------------------------------
[...truncated 11.08 MB...]
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.7/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.056Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.7/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.7/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.7/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.081Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.132Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.161Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  beamapp-jenkins-052418524-05241152-21s9-harness-txx1 Root cause: Work item failed.,
  beamapp-jenkins-052418524-05241152-21s9-harness-txx1 Root cause: Work item failed.,
  beamapp-jenkins-052418524-05241152-21s9-harness-txx1 Root cause: Work item failed.,
  beamapp-jenkins-052418524-05241152-21s9-harness-txx1 Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.260Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.311Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T18:59:30.337Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T19:00:14.518Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T19:00:14.562Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T19:00:14.600Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-24_11_52_53-1212234193272993849 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_25-11605828765450632630?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_18_46-17042816108840391491?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_25_47-14091011256070880524?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_32_17-7283137769772169568?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_40_19-17756414262703356828?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_47_38-16528196502667956181?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_54_25-17700939775848573378?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_12_01_12-14994548144791693591?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_20-2655424026792448703?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_22_52-8286049058891726234?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_30_29-7091468701097352049?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_37_41-15653838896137256240?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_45_00-18310113284239808527?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_52_53-1212234193272993849?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_21-10603230521845661171?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_22_59-6259477717696671821?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_30_50-30561736389182070?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_37_53-913047006015005887?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_46_22-3459124932007794214?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_53_19-17268718468900848734?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_23-11498544593487026912?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_16_52-10325731097220801293?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_23_53-15092442299708221789?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_31_00-14782719523917318202?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_38_22-3812494481810214287?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_45_39-1297515054799437255?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_53_31-6147429544826794002?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_20-16784030254026489586?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_13_13-6369977418939796823?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_21_11-11317282311695101750?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_28_43-5539148693986632391?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_35_51-14115813235023888244?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_43_07-1402262886397837959?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_50_45-15960206749100754811?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_19-8799895023479208802?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_12_29-18283045926399091047?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_20_17-10266302207650933582?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_27_08-4014724004255703955?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_34_09-981782386724809070?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_41_07-11533690541065903114?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_48_59-13590702475984962508?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_56_21-6850435422932298449?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_21-11545569720734816040?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_13_24-1038773290065704604?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_21_41-14492662073497936259?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_30_48-11188722154182189229?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_38_13-8429284594760237294?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_54_34-16211191946758930763?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_05_21-10295936806886969190?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_14_38-8534538660291749077?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_24_00-7079147785744550352?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_31_33-3316160120142964255?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_38_34-17764826797027689309?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_45_41-16900635281603746293?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-24_11_52_44-6122900084977610247?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py37.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 3793.921s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py37:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 4m 50s

87 actionable tasks: 64 executed, 23 from cache

Publishing build scan...
https://gradle.com/s/34wyi32pfbpsq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
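Note for anyone triaging: the failing step 'assert_that/Match' is the check that apache_beam.testing.util.assert_that attaches to a test pipeline; it compares the PCollection's actual contents against the expected values (here, sequences containing 'bicycle' and 'dinosaur') and fails with the hamcrest AssertionError quoted above when they do not match (the pipeline produced ['land vehicle', 'animal']). The snippet below is only a minimal sketch of that pattern on a local TestPipeline, not the actual postcommit test, which, judging by the contains_inanyorder frame in the traceback, passes hamcrest-backed matchers rather than plain values.

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, equal_to

# Illustrative only: assert_that adds the 'assert_that/Match' step seen in the
# log and fails the pipeline if the PCollection's contents do not match the
# expected list (comparison is order-independent).
with TestPipeline() as p:
    labels = p | beam.Create(['bicycle', 'dinosaur'])
    assert_that(labels, equal_to(['bicycle', 'dinosaur']))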
