See <https://builds.apache.org/job/beam_PostCommit_Python36/2464/display/redirect?page=changes>
Changes:

[kcweaver] [BEAM-10048] Clean up release guide.

[kcweaver] Remove checklist from build_release_candidate.sh.

[github] Include additional instructions for staging repository.

[heejong] [BEAM-10052] check hash and avoid duplicated artifacts

[github] Clarify the usage of sign_hash_python_wheels.sh.

[github] Website build: Remove -u root (#11625)

[ankurgoenka] [BEAM-10060] Upgrading Python containers to beam-master-20200521

[github] [BEAM-9722] added SnowflakeIO with Read operation (#11360)


------------------------------------------
[...truncated 10.99 MB...]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.237Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']>

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python3.6/site-packages/dataflow_worker/executor.py", line 179, in execute
    op.start()
  File "dataflow_worker/shuffle_operations.py", line 63, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 64, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 79, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 80, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "dataflow_worker/shuffle_operations.py", line 84, in dataflow_worker.shuffle_operations.GroupedShuffleReadOperation.start
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "dataflow_worker/shuffle_operations.py", line 261, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "dataflow_worker/shuffle_operations.py", line 268, in dataflow_worker.shuffle_operations.BatchGroupAlsoByWindowsOperation.process
  File "apache_beam/runners/worker/operations.py", line 332, in apache_beam.runners.worker.operations.Operation.output
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 726, in apache_beam.runners.common.PerWindowInvoker.invoke_process
  File "apache_beam/runners/common.py", line 812, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1030, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 553, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "apache_beam/runners/common.py", line 1122, in apache_beam.runners.common._OutputProcessor.process_outputs
  File "apache_beam/runners/worker/operations.py", line 195, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
  File "apache_beam/runners/worker/operations.py", line 670, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/worker/operations.py", line 671, in apache_beam.runners.worker.operations.DoOperation.process
  File "apache_beam/runners/common.py", line 963, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 1045, in apache_beam.runners.common.DoFnRunner._reraise_augmented
  File "/usr/local/lib/python3.6/site-packages/future/utils/__init__.py", line 446, in raise_with_traceback
    raise exc.with_traceback(traceback)
  File "apache_beam/runners/common.py", line 961, in apache_beam.runners.common.DoFnRunner.process
  File "apache_beam/runners/common.py", line 554, in apache_beam.runners.common.SimpleInvoker.invoke_process
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/transforms/core.py>", line 1511, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/apache_beam/testing/util.py>", line 218, in _matches
    hamcrest_assert(actual, contains_inanyorder(*expected_list))
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 44, in assert_that
    _assert_match(actual=arg1, matcher=arg2, reason=arg3)
  File "/usr/local/lib/python3.6/site-packages/hamcrest/core/assert_that.py", line 60, in _assert_match
    raise AssertionError(description)
RuntimeError: AssertionError: Expected: a sequence over [(a sequence containing 'bicycle' and a sequence containing 'dinosaur')] in any order
     but: not matched: <['land vehicle', 'animal']> [while running 'assert_that/Match']

apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.265Z: JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.330Z: JOB_MESSAGE_DEBUG: Executing failure step failure40
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.361Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S10:assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors.
The work item was attempted on these workers:
  beamapp-jenkins-052201172-05211817-5vhj-harness-kjr1 Root cause: Work item failed.,
  beamapp-jenkins-052201172-05211817-5vhj-harness-kjr1 Root cause: Work item failed.,
  beamapp-jenkins-052201172-05211817-5vhj-harness-kjr1 Root cause: Work item failed.,
  beamapp-jenkins-052201172-05211817-5vhj-harness-kjr1 Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.468Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.522Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:24:45.543Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:25:53.709Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:25:53.754Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-05-22T01:25:53.781Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-05-21_18_17_41-9667440331184460058 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_27-1453689680225134276?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_38_33-2372675391186326600?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_46_58-4103044902761724863?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_54_31-2038814830613990862?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_04_14-510752614688390478?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_12_03-10217774039914089654?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_19_36-4882598750979773544?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_20-1898995260429128448?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_43_23-4132216443832392106?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_52_47-18374486942072703513?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_01_14-4632169507646993379?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_10_29-16337794156655277307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_18_35-5482358075608978682?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_23-8533663115769468070?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_36_07-534465362428545852?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_44_30-4086431629044710842?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_53_10-7724246973241303773?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_01_24-1618473637860134796?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_09_09-16486298983763691107?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_17_41-9667440331184460058?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_26-15900354906369780910?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_42_17-10370199012498655010?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_53_14-18064716053278189648?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_02_08-16983457052383762050?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_10_36-12318039233043665370?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_18_57-5125486739474613919?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_13-13469852765890388102?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_32_34-15602201886042140460?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_41_52-10865856606320743886?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_49_39-17318644790333491299?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_57_23-3918065051443119579?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_05_56-6151665813996556713?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_14_38-4463704976319049007?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_22_38-13822311692251488740?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_16-1710711633768133245?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_32_56-1487931100352079671?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_42_39-578418922869951826?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_51_27-18385324758812009815?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_00_30-17177563505029169639?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_08_48-11168141635498427022?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_18_17-3572782020389248587?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_20-1023703932187900219?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_32_53-5060650838433584064?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_41_57-17161235555692457931?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_51_07-13799263833475662196?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_59_40-99168784473381882?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_07_47-16249513530251596558?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_16_45-6988985040155999212?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_23_20-11262820470707097301?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_34_22-10480083881604630870?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_44_54-7288709771548198907?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_17_53_59-11377012474137491419?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_01_58-644063237581396867?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_19_28-15787612253939731714?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-05-21_18_27_48-16001623698091794926?project=apache-beam-testing

----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py36.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 63 tests in 4450.804s

FAILED (SKIP=7, errors=1)

> Task :sdks:python:test-suites:dataflow:py36:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://builds.apache.org/job/beam_PostCommit_Python36/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 50

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py36:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 16m 33s
86 actionable tasks: 63 executed, 23 from cache
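Note on the failing 'assert_that/Match' step above: per the traceback, apache_beam/testing/util.py (line 218, _matches) hands the comparison to PyHamcrest via hamcrest_assert(actual, contains_inanyorder(*expected_list)). The snippet below is a minimal standalone sketch of that check, with the expected matcher reconstructed from the error description; the all_of/has_item construction and the hard-coded label lists are illustrative assumptions, not the actual post-commit test code.

    # Minimal sketch, assuming plain PyHamcrest and matchers rebuilt from the
    # error text in the log; not the actual IT test code.
    from hamcrest import all_of, assert_that, contains_inanyorder, has_item

    # "a sequence containing 'bicycle' and a sequence containing 'dinosaur'"
    expected_list = [all_of(has_item('bicycle'), has_item('dinosaur'))]

    # The job emitted these labels instead, so no pairing satisfies the
    # matcher and hamcrest raises the AssertionError quoted above.
    actual = [['land vehicle', 'animal']]

    assert_that(actual, contains_inanyorder(*expected_list))
    # AssertionError:
    # Expected: a sequence over [(a sequence containing 'bicycle' and a
    #           sequence containing 'dinosaur')] in any order
    #      but: not matched: <['land vehicle', 'animal']>

In other words, the pipeline output did reach the matcher; the work item failed because the assertion DoFn raised on the label mismatch ('land vehicle'/'animal' instead of the expected 'bicycle'/'dinosaur'), not because of a worker crash.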
Publishing build scan...
https://gradle.com/s/z7yvf7rauqzog

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
