See <https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/213/display/redirect?page=changes>
Changes:
[heejong] [BEAM-9230] Enable CrossLanguageValidateRunner test for Spark runner
[daniel.o.programmer] [BEAM-3301] Small cleanup to FullValue code.
[apilloud] [BEAM-8630] Add logical types, make public
[amyrvold] [BEAM-9261] Add LICENSE and NOTICE to Docker images
[github] [BEAM-8951] Stop using nose in load tests (#10435)
[chadrik] [BEAM-7746] Add typing for try_split
------------------------------------------
[...truncated 124.43 KB...]
    with self.scoped_process_state:
  File "apache_beam/runners/worker/operations.py", line 683, in apache_beam.runners.worker.operations.DoOperation.process
    delayed_application = self.dofn_receiver.receive(o)
  File "apache_beam/runners/common.py", line 904, in apache_beam.runners.common.DoFnRunner.receive
    self.process(windowed_value)
  File "apache_beam/runners/common.py", line 911, in apache_beam.runners.common.DoFnRunner.process
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 984, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise_with_traceback(new_exn)
  File "apache_beam/runners/common.py", line 909, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 508, in apache_beam.runners.common.SimpleInvoker.invoke_process
    windowed_value, self.process_method(windowed_value.value))
  File "apache_beam/transforms/core.py", line 1455, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py", line 84, in format_record
NameError: global name 'base64' is not defined [while running 'Format']
Note: imports, functions and other variables defined in the global context of
your __main__ file of your Dataflow pipeline are, by default, not available in
the worker execution environment, and such references will cause a NameError,
unless the --save_main_session pipeline option is set to True. Please see
https://cloud.google.com/dataflow/faq#how-do-i-handle-nameerrors for additional
documentation on configuring your worker execution environment.
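The note above points at the fix for this failure: either enable the save_main_session pipeline option or import base64 inside the function that uses it. A minimal sketch of both remedies, assuming an illustrative pipeline and format_record body rather than the actual test code:

    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions, SetupOptions


    def format_record(record):
        # Remedy 1: import inside the function so the name resolves on the
        # worker no matter how the pipeline was pickled.
        import base64
        return {'data': base64.b64encode(record)}


    def run():
        options = PipelineOptions()
        # Remedy 2: pickle the __main__ session so module-level imports
        # travel to the Dataflow workers (equivalent to passing
        # --save_main_session on the command line).
        options.view_as(SetupOptions).save_main_session = True
        with beam.Pipeline(options=options) as p:
            (p
             | beam.Create([b'example'])
             | 'Format' >> beam.Map(format_record))


    if __name__ == '__main__':
        run()

Either remedy alone is enough here; importing inside the function is the more robust habit for code that runs on remote workers.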
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:39.629Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 647, in do_work
    work_executor.execute()
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/executor.py", line 176, in execute
    op.start()
  File "dataflow_worker/native_operations.py", line 38, in dataflow_worker.native_operations.NativeReadOperation.start
    def start(self):
  File "dataflow_worker/native_operations.py", line 39, in dataflow_worker.native_operations.NativeReadOperation.start
    with self.scoped_start_state:
  File "dataflow_worker/native_operations.py", line 44, in dataflow_worker.native_operations.NativeReadOperation.start
    with self.spec.source.reader() as reader:
  File "dataflow_worker/native_operations.py", line 54, in dataflow_worker.native_operations.NativeReadOperation.start
    self.output(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 326, in apache_beam.runners.worker.operations.Operation.output
    cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 190, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
    self.consumer.process(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 682, in apache_beam.runners.worker.operations.DoOperation.process
    with self.scoped_process_state:
  File "apache_beam/runners/worker/operations.py", line 683, in apache_beam.runners.worker.operations.DoOperation.process
    delayed_application = self.dofn_receiver.receive(o)
  File "apache_beam/runners/common.py", line 904, in apache_beam.runners.common.DoFnRunner.receive
    self.process(windowed_value)
  File "apache_beam/runners/common.py", line 911, in apache_beam.runners.common.DoFnRunner.process
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 969, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise
  File "apache_beam/runners/common.py", line 909, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 507, in apache_beam.runners.common.SimpleInvoker.invoke_process
    self.output_processor.process_outputs(
  File "apache_beam/runners/common.py", line 1057, in apache_beam.runners.common._OutputProcessor.process_outputs
    self.main_receivers.receive(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 190, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
    self.consumer.process(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 682, in apache_beam.runners.worker.operations.DoOperation.process
    with self.scoped_process_state:
  File "apache_beam/runners/worker/operations.py", line 683, in apache_beam.runners.worker.operations.DoOperation.process
    delayed_application = self.dofn_receiver.receive(o)
  File "apache_beam/runners/common.py", line 904, in apache_beam.runners.common.DoFnRunner.receive
    self.process(windowed_value)
  File "apache_beam/runners/common.py", line 911, in apache_beam.runners.common.DoFnRunner.process
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 984, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise_with_traceback(new_exn)
  File "apache_beam/runners/common.py", line 909, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 508, in apache_beam.runners.common.SimpleInvoker.invoke_process
    windowed_value, self.process_method(windowed_value.value))
  File "apache_beam/transforms/core.py", line 1455, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py", line 84, in format_record
NameError: global name 'base64' is not defined [while running 'Format']
Note: imports, functions and other variables defined in the global context of
your __main__ file of your Dataflow pipeline are, by default, not available in
the worker execution environment, and such references will cause a NameError,
unless the --save_main_session pipeline option is set to True. Please see
https://cloud.google.com/dataflow/faq#how-do-i-handle-nameerrors for additional
documentation on configuring your worker execution environment.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:40.137Z: JOB_MESSAGE_BASIC: Executing BigQuery import job "dataflow_job_4985232681084637294". You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_4985232681084637294".
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:41.785Z: JOB_MESSAGE_BASIC: Finished operation Produce rows+Count messages+Format+Measure time+Write to BigQuery/WriteToBigQuery/NativeWrite
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:41.908Z: JOB_MESSAGE_DEBUG: Executing failure step failure2
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:41.936Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S01:Produce rows+Count messages+Format+Measure time+Write to BigQuery/WriteToBigQuery/NativeWrite failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  performance-tests-bqio-wr-02070753-o59b-harness-t8fg
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02070753-o59b-harness-t8fg
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02070753-o59b-harness-t8fg
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02070753-o59b-harness-6dj5
      Root cause: Work item failed.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:42.042Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:42.079Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:57:42.107Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:59:32.921Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 5 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:59:32.973Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-07T15:59:33.008Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2020-02-07_07_53_14-8367366266055844291 is in state JOB_STATE_FAILED
INFO:apache_beam.io.gcp.tests.utils:Clean up a BigQuery table with project: apache-beam-testing, dataset: beam_performance, table: bqio_write_10GB.
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File "https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py", line 108, in <module>
    BigQueryWritePerfTest().run()
  File "apache_beam/testing/load_tests/load_test.py", line 108, in run
    self.cleanup()
  File "https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py", line 103, in cleanup
    self.project_id, self.output_dataset, self.output_table)
  File "apache_beam/utils/retry.py", line 234, in wrapper
    return fun(*args, **kwargs)
  File "apache_beam/io/gcp/tests/utils.py", line 106, in delete_bq_table
    raise GcpTestIOError('BigQuery table does not exist: %s' % table_ref)
apache_beam.io.gcp.tests.utils.GcpTestIOError: BigQuery table does not exist: TableReference(DatasetReference(u'apache-beam-testing', 'beam_performance'), 'bqio_write_10GB')
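The cleanup error above is a downstream symptom, not a second bug: the job failed before the Write step created bqio_write_10GB, so the test's delete_bq_table call finds nothing to remove and raises GcpTestIOError. A minimal sketch of a cleanup that tolerates a missing table, using the google-cloud-bigquery client rather than the load-test utility itself:

    from google.cloud import bigquery


    def delete_table_if_exists(project_id, dataset_id, table_id):
        """Delete a BigQuery table, treating 'already absent' as success."""
        client = bigquery.Client(project=project_id)
        table_ref = '{}.{}.{}'.format(project_id, dataset_id, table_id)
        # not_found_ok=True turns the 404 for a missing table into a no-op.
        client.delete_table(table_ref, not_found_ok=True)


    delete_table_if_exists('apache-beam-testing', 'beam_performance', 'bqio_write_10GB')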
> Task :sdks:python:apache_beam:testing:load_tests:run FAILED
FAILURE: Build failed with an exception.
* Where:
Build file 'https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/testing/load_tests/build.gradle' line: 51
* What went wrong:
Execution failed for task ':sdks:python:apache_beam:testing:load_tests:run'.
> error occurred
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 7m 26s
5 actionable tasks: 5 executed
Publishing build scan...
https://gradle.com/s/ib2u6fmzj4xtu
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]