See <https://builds.apache.org/job/beam_PostCommit_Python2/1435/display/redirect>
Changes:

------------------------------------------
[...truncated 6.87 MB...]
    from hamcrest.library.number.ordering_comparison import greater_than
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/__init__.py", line 2, in <module>
    from hamcrest.library import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/__init__.py", line 7, in <module>
    from hamcrest.library.object import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/__init__.py", line 4, in <module>
    from .hasproperty import has_properties, has_property
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/hasproperty.py", line 174
    ),
     ^
SyntaxError: invalid syntax
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:43:58.835Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "apache_beam/runners/worker/operations.py", line 649, in apache_beam.runners.worker.operations.DoOperation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 651, in apache_beam.runners.worker.operations.DoOperation.start
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 652, in apache_beam.runners.worker.operations.DoOperation.start
    super(DoOperation, self).start()
  File "apache_beam/runners/worker/operations.py", line 261, in apache_beam.runners.worker.operations.Operation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 266, in apache_beam.runners.worker.operations.Operation.start
    self.setup()
  File "apache_beam/runners/worker/operations.py", line 597, in apache_beam.runners.worker.operations.DoOperation.setup
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 602, in apache_beam.runners.worker.operations.DoOperation.setup
    pickler.loads(self.spec.serialized_fn))
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 290, in loads
    return dill.loads(s)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 275, in loads
    return load(file, ignore, **kwds)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 270, in load
    return Unpickler(file, ignore=ignore, **kwds).load()
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 472, in load
    obj = StockUnpickler.load(self)
  File "/usr/lib/python2.7/pickle.py", line 864, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1139, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 827, in _import_module
    return getattr(__import__(module, None, None, [obj]), obj)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/dataflow/dataflow_exercise_metrics_pipeline.py", line 26, in <module>
    from hamcrest.library.number.ordering_comparison import greater_than
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/__init__.py", line 2, in <module>
    from hamcrest.library import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/__init__.py", line 7, in <module>
    from hamcrest.library.object import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/__init__.py", line 4, in <module>
    from .hasproperty import has_properties, has_property
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/hasproperty.py", line 174
    ),
     ^
SyntaxError: invalid syntax
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:01.959Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "apache_beam/runners/worker/operations.py", line 649, in apache_beam.runners.worker.operations.DoOperation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 651, in apache_beam.runners.worker.operations.DoOperation.start
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 652, in apache_beam.runners.worker.operations.DoOperation.start
    super(DoOperation, self).start()
  File "apache_beam/runners/worker/operations.py", line 261, in apache_beam.runners.worker.operations.Operation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 266, in apache_beam.runners.worker.operations.Operation.start
    self.setup()
  File "apache_beam/runners/worker/operations.py", line 597, in apache_beam.runners.worker.operations.DoOperation.setup
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 602, in apache_beam.runners.worker.operations.DoOperation.setup
    pickler.loads(self.spec.serialized_fn))
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 290, in loads
    return dill.loads(s)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 275, in loads
    return load(file, ignore, **kwds)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 270, in load
    return Unpickler(file, ignore=ignore, **kwds).load()
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 472, in load
    obj = StockUnpickler.load(self)
  File "/usr/lib/python2.7/pickle.py", line 864, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1139, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 827, in _import_module
    return getattr(__import__(module, None, None, [obj]), obj)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/dataflow/dataflow_exercise_metrics_pipeline.py", line 26, in <module>
    from hamcrest.library.number.ordering_comparison import greater_than
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/__init__.py", line 2, in <module>
    from hamcrest.library import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/__init__.py", line 7, in <module>
    from hamcrest.library.object import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/__init__.py", line 4, in <module>
    from .hasproperty import has_properties, has_property
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/hasproperty.py", line 174
    ),
     ^
SyntaxError: invalid syntax
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.103Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "apache_beam/runners/worker/operations.py", line 649, in apache_beam.runners.worker.operations.DoOperation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 651, in apache_beam.runners.worker.operations.DoOperation.start
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 652, in apache_beam.runners.worker.operations.DoOperation.start
    super(DoOperation, self).start()
  File "apache_beam/runners/worker/operations.py", line 261, in apache_beam.runners.worker.operations.Operation.start
    def start(self):
  File "apache_beam/runners/worker/operations.py", line 266, in apache_beam.runners.worker.operations.Operation.start
    self.setup()
  File "apache_beam/runners/worker/operations.py", line 597, in apache_beam.runners.worker.operations.DoOperation.setup
    with self.scoped_start_state:
  File "apache_beam/runners/worker/operations.py", line 602, in apache_beam.runners.worker.operations.DoOperation.setup
    pickler.loads(self.spec.serialized_fn))
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 290, in loads
    return dill.loads(s)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 275, in loads
    return load(file, ignore, **kwds)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 270, in load
    return Unpickler(file, ignore=ignore, **kwds).load()
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 472, in load
    obj = StockUnpickler.load(self)
  File "/usr/lib/python2.7/pickle.py", line 864, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1139, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/_dill.py", line 827, in _import_module
    return getattr(__import__(module, None, None, [obj]), obj)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/dataflow/dataflow_exercise_metrics_pipeline.py", line 26, in <module>
    from hamcrest.library.number.ordering_comparison import greater_than
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/__init__.py", line 2, in <module>
    from hamcrest.library import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/__init__.py", line 7, in <module>
    from hamcrest.library.object import *
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/__init__.py", line 4, in <module>
    from .hasproperty import has_properties, has_property
  File "/usr/local/lib/python2.7/dist-packages/hamcrest/library/object/hasproperty.py", line 174
    ),
     ^
SyntaxError: invalid syntax
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.133Z: JOB_MESSAGE_BASIC: Finished operation Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.195Z: JOB_MESSAGE_DEBUG: Executing failure step failure12
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.224Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S02:Create/Read+metrics+map_to_common_key+GroupByKey/Reify+GroupByKey/Write failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  beamapp-jenkins-011312390-01130439-s261-harness-qvz9
      Root cause: Work item failed.,
  beamapp-jenkins-011312390-01130439-s261-harness-qvz9
      Root cause: Work item failed.,
  beamapp-jenkins-011312390-01130439-s261-harness-qvz9
      Root cause: Work item failed.,
  beamapp-jenkins-011312390-01130439-s261-harness-qvz9
      Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.342Z: JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.429Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:44:05.450Z: JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:45:45.655Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:45:45.689Z: JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-01-13T12:45:45.720Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-01-13_04_39_26-5364156032737486672 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 50 tests in 3468.191s

FAILED (SKIP=7, errors=7)

Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_59-13171185587624514206?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_19_59-3832180792109917222?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_27_20-9506646128378121881?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_33_57-12146031876224185183?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_40_44-11695822977268896010?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_48_09-1870166801693169056?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_54_39-10851404900430972997?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_04_01-16460923798531359030?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_18_44-11735917190195965509?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_26_03-5883187225693570641?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_33_10-1546804533896155732?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_41_00-11510157133545735896?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_56-2768083405784738800?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_22_46-15859701104301668598?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_59-11787066123898964715?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_15_53-8463049179299713070?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_22_25-303178613940687343?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_29_05-15474682603341178420?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_35_32-12946487695682640076?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_58-7307900601090886834?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_11_17-13267507514025502413?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_19_17-15415948860543823424?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_26_48-1773663012510413088?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_33_10-11564901059680023349?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_39_26-5364156032737486672?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_57-18248404992434871028?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_10_32-11244604918038170186?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_19_10-7732034231804971647?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_25_44-8874892455597151860?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_32_37-15208872552416394941?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_39_19-3885050074063426360?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_59-5932500862889782612?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_11_43-109489897445315536?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_19_17-313050422853472012?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_26_04-10175118867882001268?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_33_09-11361701625944270514?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_39_55-9012773707163888203?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_03_58-14467645309478952190?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_12_31-1828390330113334273?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_22_29-18433148142749951757?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_30_26-17610845474297889503?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2020-01-13_04_37_40-14851443028632073129?project=apache-beam-testing

> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle'> line: 85

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 59m 6s

121 actionable tasks: 95 executed, 23 from cache, 3 up-to-date

Publishing build scan...
https://gradle.com/s/2fadmpht6jpji

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: builds-unsubscr...@beam.apache.org
For additional commands, e-mail: builds-h...@beam.apache.org
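
A note on the repeated worker error above, offered as an inference rather than something the log itself states: each failed work item dies while unpickling the DoFn, at the point where dataflow_exercise_metrics_pipeline.py imports hamcrest, and the SyntaxError is raised from hamcrest/library/object/hasproperty.py while the Python 2.7 interpreter byte-compiles it. That pattern is consistent with the worker having installed a PyHamcrest release that only supports Python 3. The snippet below is a minimal sketch of how to check for that failure mode locally under the same interpreter; it assumes nothing beyond the import the pipeline module itself performs.

    import sys

    try:
        # The same import that dataflow_exercise_metrics_pipeline.py performs
        # (line 26 in the tracebacks above). If the installed PyHamcrest release
        # targets Python 3 only, Python 2.7 cannot byte-compile the package and
        # the import itself raises SyntaxError.
        from hamcrest.library.number.ordering_comparison import greater_than  # noqa: F401
        print("hamcrest imported cleanly on Python %s.%s" % sys.version_info[:2])
    except SyntaxError as exc:
        print("hamcrest cannot be byte-compiled by this interpreter: %s" % exc)
    except ImportError as exc:
        print("hamcrest is not installed in this environment: %s" % exc)

If that reading is right, constraining the Python 2 test environment to a Python-2-compatible PyHamcrest release (for example, a pyhamcrest<2.0 pin in the relevant requirements) would be the likely remedy; the specific pin is an assumption based on PyHamcrest's release history, not something this log confirms.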