See
<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/320/display/redirect>
Changes:
------------------------------------------
[...truncated 470.06 KB...]
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 625, in do_instruction
return getattr(self, request_type)(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 656, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 487, in get
processor = bundle_processor.BundleProcessor(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/bundle_processor.py",
line 903, in __init__
op.setup(self.data_sampler)
File "apache_beam/runners/****/operations.py", line 875, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/****/operations.py", line 925, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/common.py", line 1493, in
apache_beam.runners.common.DoFnRunner.setup
File "apache_beam/runners/common.py", line 1489, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 1533, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
File "apache_beam/runners/common.py", line 1487, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.DoFnInvoker.invoke_setup
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 873, in setup
self._model = self._load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 849, in _load_model
model = self._shared_model_handle.acquire(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 305, in acquire
return _shared_map.acquire(self._key, constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 246, in acquire
result = control_block.acquire(constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 139, in acquire
result = constructor_fn()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 834, in load
model = self._model_handler.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 270, in load_model
return self._unkeyed.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 501, in load_model
model, device = _load_model(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 119, in _load_model
raise e
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 102, in _load_model
model.load_state_dict(state_dict)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/torch/nn/modules/module.py",
line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for BertForMaskedLM:
Unexpected key(s) in state_dict: "bert.embeddings.position_ids".
[while running 'PyTorchRunInference/BeamML_RunInference-ptransform-81']
Worker ID: benchmark-tests-pytorch-l-07261151-vogd-harness-3dh4,
Root cause: Traceback (most recent call last):
File "apache_beam/runners/common.py", line 1487, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.DoFnInvoker.invoke_setup
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 873, in setup
self._model = self._load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 849, in _load_model
model = self._shared_model_handle.acquire(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 305, in acquire
return _shared_map.acquire(self._key, constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 246, in acquire
result = control_block.acquire(constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 139, in acquire
result = constructor_fn()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 834, in load
model = self._model_handler.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 270, in load_model
return self._unkeyed.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 501, in load_model
model, device = _load_model(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 119, in _load_model
raise e
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 102, in _load_model
model.load_state_dict(state_dict)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/torch/nn/modules/module.py",
line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for BertForMaskedLM:
Unexpected key(s) in state_dict: "bert.embeddings.position_ids".
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 297, in _execute
response = task()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 372, in <lambda>
lambda: self.create_****().do_instruction(request), request)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 625, in do_instruction
return getattr(self, request_type)(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 656, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 487, in get
processor = bundle_processor.BundleProcessor(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/bundle_processor.py",
line 903, in __init__
op.setup(self.data_sampler)
File "apache_beam/runners/****/operations.py", line 875, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/****/operations.py", line 925, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/common.py", line 1493, in
apache_beam.runners.common.DoFnRunner.setup
File "apache_beam/runners/common.py", line 1489, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 1533, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
File "apache_beam/runners/common.py", line 1487, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.DoFnInvoker.invoke_setup
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 873, in setup
self._model = self._load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 849, in _load_model
model = self._shared_model_handle.acquire(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 305, in acquire
return _shared_map.acquire(self._key, constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 246, in acquire
result = control_block.acquire(constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 139, in acquire
result = constructor_fn()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 834, in load
model = self._model_handler.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 270, in load_model
return self._unkeyed.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 501, in load_model
model, device = _load_model(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 119, in _load_model
raise e
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 102, in _load_model
model.load_state_dict(state_dict)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/torch/nn/modules/module.py",
line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for BertForMaskedLM:
Unexpected key(s) in state_dict: "bert.embeddings.position_ids".
[while running 'PyTorchRunInference/BeamML_RunInference-ptransform-81']
Worker ID: benchmark-tests-pytorch-l-07261151-vogd-harness-vvxg
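Note: the root cause above is a checkpoint/model mismatch rather than an infrastructure failure. The saved BertForMaskedLM state dict still carries the bert.embeddings.position_ids buffer, but the model class instantiated on the workers no longer declares it (recent transformers releases stopped persisting that buffer), so the strict load_state_dict() call in pytorch_inference._load_model() rejects it. A minimal repair sketch follows; the checkpoint paths and the "bert-large-uncased" name are illustrative placeholders, not values taken from this job, and dropping the key (or loading non-strictly) is one possible workaround, not the benchmark's actual fix.

# Hedged sketch: strip the stale buffer key from the checkpoint (or load
# non-strictly) so the strict load_state_dict() on the workers succeeds.
# Paths and the "bert-large-uncased" name are illustrative placeholders.
import torch
from transformers import BertConfig, BertForMaskedLM

state_dict = torch.load("bert-large-uncased.pth", map_location="cpu")

# Checkpoints written with an older transformers release persist
# position_ids as a buffer; newer releases do not, which yields the
# "Unexpected key(s)" error when the two are mixed.
state_dict.pop("bert.embeddings.position_ids", None)

model = BertForMaskedLM(BertConfig.from_pretrained("bert-large-uncased"))
missing, unexpected = model.load_state_dict(state_dict, strict=False)
print("missing:", missing, "unexpected:", unexpected)

# Re-save the cleaned checkpoint for the Dataflow workers to load.
torch.save(state_dict, "bert-large-uncased-cleaned.pth")

Alternatively, regenerating the checkpoint with the same transformers version that the worker container installs (or pinning that version in the benchmark's requirements) avoids the mismatch without editing any keys.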
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T18:58:39.339Z:
JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T18:58:39.495Z:
JOB_MESSAGE_DEBUG: Starting **** pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T18:58:39.519Z:
JOB_MESSAGE_BASIC: Stopping **** pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T19:01:08.236Z:
JOB_MESSAGE_DETAILED: Autoscaling: Resized **** pool from 250 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T19:01:08.275Z:
JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2023-07-26T19:01:08.306Z:
JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job
2023-07-26_11_51_07-7589597009195410199 is in state JOB_STATE_FAILED
ERROR:apache_beam.runners.dataflow.dataflow_runner:Console URL:
https://console.cloud.google.com/dataflow/jobs/<RegionId>/2023-07-26_11_51_07-7589597009195410199?project=<ProjectId>
Traceback (most recent call last):
File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/usr/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File
"<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/testing/benchmarks/inference/pytorch_language_modeling_benchmarks.py",>
line 42, in <module>
PytorchLanguageModelingBenchmarkTest().run()
File
"<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/testing/load_tests/load_test.py",>
line 148, in run
self.test()
File
"<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/testing/benchmarks/inference/pytorch_language_modeling_benchmarks.py",>
line 35, in test
self.result = pytorch_language_modeling.run(
File
"<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/examples/inference/pytorch_language_modeling.py",>
line 210, in run
result.wait_until_finish()
File
"<https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py",>
line 756, in wait_until_finish
raise DataflowRuntimeException(
apache_beam.runners.dataflow.dataflow_runner.DataflowRuntimeException: Dataflow
pipeline failed. State: FAILED, Error:
Traceback (most recent call last):
File "apache_beam/runners/common.py", line 1487, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.DoFnInvoker.invoke_setup
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 873, in setup
self._model = self._load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 849, in _load_model
model = self._shared_model_handle.acquire(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 305, in acquire
return _shared_map.acquire(self._key, constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 246, in acquire
result = control_block.acquire(constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 139, in acquire
result = constructor_fn()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 834, in load
model = self._model_handler.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 270, in load_model
return self._unkeyed.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 501, in load_model
model, device = _load_model(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 119, in _load_model
raise e
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 102, in _load_model
model.load_state_dict(state_dict)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/torch/nn/modules/module.py",
line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for BertForMaskedLM:
Unexpected key(s) in state_dict: "bert.embeddings.position_ids".
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 297, in _execute
response = task()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 372, in <lambda>
lambda: self.create_****().do_instruction(request), request)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 625, in do_instruction
return getattr(self, request_type)(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 656, in process_bundle
bundle_processor = self.bundle_processor_cache.get(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/sdk_****.py",
line 487, in get
processor = bundle_processor.BundleProcessor(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/runners/****/bundle_processor.py",
line 903, in __init__
op.setup(self.data_sampler)
File "apache_beam/runners/****/operations.py", line 875, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/****/operations.py", line 925, in
apache_beam.runners.****.operations.DoOperation.setup
File "apache_beam/runners/common.py", line 1493, in
apache_beam.runners.common.DoFnRunner.setup
File "apache_beam/runners/common.py", line 1489, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 1533, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
File "apache_beam/runners/common.py", line 1487, in
apache_beam.runners.common.DoFnRunner._invoke_lifecycle_method
File "apache_beam/runners/common.py", line 553, in
apache_beam.runners.common.DoFnInvoker.invoke_setup
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 873, in setup
self._model = self._load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 849, in _load_model
model = self._shared_model_handle.acquire(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 305, in acquire
return _shared_map.acquire(self._key, constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 246, in acquire
result = control_block.acquire(constructor_fn, tag)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/utils/shared.py",
line 139, in acquire
result = constructor_fn()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 834, in load
model = self._model_handler.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/base.py",
line 270, in load_model
return self._unkeyed.load_model()
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 501, in load_model
model, device = _load_model(
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 119, in _load_model
raise e
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/apache_beam/ml/inference/pytorch_inference.py",
line 102, in _load_model
model.load_state_dict(state_dict)
File
"/opt/apache/beam-venv/beam-venv-****-sdk-0-0/lib/python3.8/site-packages/torch/nn/modules/module.py",
line 2041, in load_state_dict
raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
RuntimeError: Error(s) in loading state_dict for BertForMaskedLM:
Unexpected key(s) in state_dict: "bert.embeddings.position_ids".
[while running 'PyTorchRunInference/BeamML_RunInference-ptransform-81']
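For context, the failing 'PyTorchRunInference/BeamML_RunInference' step is the RunInference transform of the language-modeling example. A stripped-down sketch of that wiring (bucket path and model name are placeholders, not this job's configuration) shows why the error only surfaces during DoFn setup on the workers rather than at submission time:

# Minimal sketch, assuming placeholder paths: RunInference loads the model
# once per worker via a shared handle (load_model() -> torch.load() ->
# load_state_dict()), so a key mismatch fails every bundle's setup.
import apache_beam as beam
import torch
from apache_beam.ml.inference.base import RunInference
from apache_beam.ml.inference.pytorch_inference import PytorchModelHandlerKeyedTensor
from transformers import BertConfig, BertForMaskedLM, BertTokenizer

tokenizer = BertTokenizer.from_pretrained("bert-large-uncased")

def tokenize(sentence):
    # The keyed-tensor handler expects Dict[str, torch.Tensor] elements;
    # squeeze away the batch dimension the tokenizer adds.
    tokens = tokenizer(sentence, return_tensors="pt")
    return {k: torch.squeeze(v) for k, v in tokens.items()}

model_handler = PytorchModelHandlerKeyedTensor(
    state_dict_path="gs://<bucket>/bert-large-uncased.pth",  # placeholder
    model_class=BertForMaskedLM,
    model_params={"config": BertConfig.from_pretrained("bert-large-uncased")},
)

with beam.Pipeline() as pipeline:
    _ = (
        pipeline
        | "CreateSentences" >> beam.Create(["Paris is the [MASK] of France."])
        | "Tokenize" >> beam.Map(tokenize)
        | "RunInference" >> RunInference(model_handler)
    )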
> Task :sdks:python:apache_beam:testing:load_tests:run FAILED
FAILURE: Build failed with an exception.
* Where:
Build file
'https://ci-beam.apache.org/job/beam_Inference_Python_Benchmarks_Dataflow/ws/src/sdks/python/apache_beam/testing/load_tests/build.gradle'
line: 63
* What went wrong:
Execution failed for task ':sdks:python:apache_beam:testing:load_tests:run'.
> error occurred
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings
and determine if they come from your own scripts or plugins.
See
https://docs.gradle.org/7.6.2/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 15m 29s
15 actionable tasks: 4 executed, 11 up-to-date
Publishing build scan...
https://ge.apache.org/s/7gkp6a4q2fkfc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure