See <https://builds.apache.org/job/beam_PostCommit_Python35/2484/display/redirect>
Changes:

------------------------------------------
[...truncated 11.00 MB...]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order but: not matched: <['animal', 'land vehicle']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.5/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.539Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order but: not matched: <['animal', 'land vehicle']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.5/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.5/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.5/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order but: not matched: <['animal', 'land vehicle']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.563Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.632Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.667Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  beamapp-jenkins-052318552-05231155-qx9g-harness-gn5b Root cause: Work item failed.,
  beamapp-jenkins-052318552-05231155-qx9g-harness-gn5b Root cause: Work item failed.,
  beamapp-jenkins-052318552-05231155-qx9g-harness-gn5b Root cause: Work item failed.,
  beamapp-jenkins-052318552-05231155-qx9g-harness-gn5b Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.792Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.846Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:02:05.883Z: JOB_MESSAGE_BASIC: Stopping worker pool...
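
For anyone triaging this: the matcher text above is produced by a helper in apache_beam.testing.util (the _matches frame at util.py line 218), which wraps the expected values in PyHamcrest's contains_inanyorder and evaluates them inside the 'assert_that/Match' step, so a mismatch is raised as an AssertionError on the Dataflow worker and the work item is retried until the job fails. The sketch below is a minimal, hypothetical reproduction of that pattern on a local runner; it is not the failing integration test itself, and the element contents come only from the error message above.

# Minimal sketch (assumption: not the actual failing IT) of the
# assert_that + matches_all pattern whose mismatch appears in the log above.
import hamcrest as hc

import apache_beam as beam
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import assert_that, matches_all

with TestPipeline() as p:
    # A single element holding the labels the worker actually produced.
    labels = p | beam.Create([['animal', 'land vehicle']])

    # The failing job expected an element containing 'bicycle' and 'dinosaur';
    # this sketch asserts on the labels that were actually seen, so it passes.
    # Swapping in has_item('bicycle') / has_item('dinosaur') reproduces the
    # "not matched: <['animal', 'land vehicle']>" message from the log.
    assert_that(
        labels,
        matches_all(
            [hc.all_of(hc.has_item('animal'), hc.has_item('land vehicle'))]))
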
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:03:21.193Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:03:21.244Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-23T19:03:21.278Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-23_11_55_32-2439027563359300021 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_03_03-10085928936947277158?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_17_05-10523174639860823966?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_25_14-7430812503760829287?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_33_15-13437218635127938181?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_42_51-15373890755629335023?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_50_06-4904919940175383220?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_57_27-17681105970497225754?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_02_58-6338040671995314729?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_22_14-5890800601478816410?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_30_42-8013990776494760617?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_39_23-6871077750937203806?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_56_29-10982757737090021803?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_12_04_28-11749391034181447043?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_03_01-13114469316643727797?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_15_10-6761279624424920238?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_23_10-6464484718917973433?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_31_59-13672821594493134997?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_40_24-13893716988320173574?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_47_58-3486360692938381491?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_56_26-3131266011827461083?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_03_00-18406940509609898282?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_22_33-1000995337586083244?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_30_40-9979406666601856883?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_39_18-13923511742604104663?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_48_02-6413147616736736535?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_56_26-2785747860918137636?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_03_00-6750183543311807045?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_11_36-10028520829058659821?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_20_41-16114814515537432538?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_29_10-7884205848322375603?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_37_15-17081172478043143883?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_45_19-3191139524878754961?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_54_12-631506559543935798?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_02_58-1762812249989282755?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_10_45-13850786751601505629?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_19_40-16327511250493301735?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_27_54-8527218977399116630?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_36_04-6527905750773458340?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_43_15-15917867547504417335?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_51_40-4884367881570443938?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_59_49-3410544964326411272?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_03_00-6660851029313369499?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_11_57-17031935257151473984?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_20_50-10638678250949024018?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_30_14-15023013966439585227?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_38_34-12374679205980812350?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_46_49-1385443321167773689?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_55_32-2439027563359300021?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_02_59-4497765339852894534?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_13_40-9927005835584496205?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_24_18-14747052487230630727?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_33_13-12830973936673936996?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_41_29-3533831311027748301?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_49_47-9441564331946973079?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-23_11_57_37-6328200896713747409?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py35.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 4178.355s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py35:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python35/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py35:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 11m 39s

86 actionable tasks: 63 executed, 23 from cache

Publishing build scan...
https://gradle.com/s/jqzlajjndipv2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
