See <https://builds.apache.org/job/beam_PostCommit_Python35/2486/display/redirect>
Changes:

------------------------------------------
[...truncated 11.01 MB...]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py">, line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: 
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.5/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py">, line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py">, line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: 
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.589Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py">, line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py">, line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: 
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.5/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py">, line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py">, line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: 
Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.618Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.701Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.740Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers: beamapp-jenkins-052406494-05232349-b55h-harness-79td Root cause: Work item failed., beamapp-jenkins-052406494-05232349-b55h-harness-79td Root cause: Work item failed., beamapp-jenkins-052406494-05232349-b55h-harness-79td Root cause: Work item failed., beamapp-jenkins-052406494-05232349-b55h-harness-79td Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.861Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.918Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:56:32.949Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:57:16.533Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:57:16.588Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-24T06:57:16.616Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-23_23_49_52-6294600728212933510 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_38-11399002913256755427?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_16_03-4426344995265838576?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_22_50-17681614809541643585?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_29_38-8939582908745744649?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_37_07-10798895968351046832?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_44_05-13189865369750087601?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_51_00-2834369432825917809?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_34-16602520457617619533?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_24_23-13395172558854746736?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_32_26-9956190152759358542?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_39_04-7780843951015811419?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_47_33-16002117658348672741?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_54_00-6911350680669099988?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_36-3980589128873561708?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_14_04-7200436260147929180?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_20_57-10148057902271742961?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_28_24-17020185464119690371?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_35_57-2516451161493963782?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_42_55-11482250842768711017?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_50_01-2549831247469511601?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_35-17287694501114887828?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_19_36-16044841583857750650?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_27_04-4898218238229073599?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_34_49-10288966002122907694?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_42_06-4487845526775025417?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_49_52-6294600728212933510?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_35-12862060776683188932?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_10_04-12047203816172068467?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_17_54-10469467763188763869?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_24_26-7988283822334702576?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_31_13-11309121850807511317?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_39_13-8463080671042306839?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_47_27-2550704187637198274?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_34-14911044626125324421?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_10_13-11240296974011415178?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_18_00-13196464076031052805?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_25_08-7306337207979630497?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_32_27-1825721289605080585?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_38_59-3081792760392067533?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_46_17-11577605546124378494?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_53_27-3081345456115024544?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_36-94191561527166852?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_10_38-6163759721667170406?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_18_57-5792090381927364554?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_27_45-380423717638141157?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_35_32-6599606976522405710?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_43_12-18201195986008577888?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_50_29-4652099819993691809?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_02_35-3220357436542267438?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_11_26-3657744060257720987?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_21_24-13810982105929965886?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_28_52-13712715711808317641?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_35_42-10207212014049704876?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_52_13-7418850503022233857?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_23_59_30-7372117190656704874?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py35.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 3899.564s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py35:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py35:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 6m 44s

86 actionable tasks: 63 executed, 23 from cache

Publishing build scan...
https://gradle.com/s/2oajgvzzn55sq

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
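For context on the failing 'assert_that/Match' step: the traceback goes through apache_beam.testing.util (matches_all's _matches wrapping hamcrest's contains_inanyorder), which is Beam's standard pipeline-assertion pattern. A minimal sketch of that pattern follows; the Create() input and the expected 'bicycle'/'dinosaur' matchers are illustrative assumptions, not the actual integration test that failed above.

import hamcrest as hc

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, matches_all

# Illustrative only: a single output element carrying the labels that the
# transform under test would produce (hard-coded here for the sketch).
with TestPipeline() as p:
    labels = p | beam.Create([['bicycle', 'dinosaur']])
    # assert_that evaluates the matcher in a step named 'assert_that/Match';
    # a mismatch surfaces as the hamcrest AssertionError shown in the log
    # ("Expected: a sequence over [...] in any order but: not matched: ...").
    assert_that(
        labels,
        matches_all(
            [hc.all_of(hc.has_item('bicycle'), hc.has_item('dinosaur'))]))

Run locally this passes on the DirectRunner; the Dataflow job above failed because the actual output element was ['land vehicle', 'animal'] rather than one containing 'bicycle' and 'dinosaur'.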
