See <https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/216/display/redirect?page=changes>

Changes:

[iemejia] [BEAM-7092] Fix invalid import of Guava coming from transitive Spark dep


------------------------------------------
[...truncated 141.33 KB...]
    delayed_application = self.dofn_runner.process(o)
  File "apache_beam/runners/common.py", line 907, in apache_beam.runners.common.DoFnRunner.process
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 965, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise
  File "apache_beam/runners/common.py", line 905, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 507, in apache_beam.runners.common.SimpleInvoker.invoke_process
    self.output_processor.process_outputs(
  File "apache_beam/runners/common.py", line 1053, in apache_beam.runners.common._OutputProcessor.process_outputs
    self.main_receivers.receive(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 190, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
    self.consumer.process(windowed_value)
  File "apache_beam/runners/worker/operations.py", line 678, in apache_beam.runners.worker.operations.DoOperation.process
    with self.scoped_process_state:
  File "apache_beam/runners/worker/operations.py", line 679, in apache_beam.runners.worker.operations.DoOperation.process
    delayed_application = self.dofn_runner.process(o)
  File "apache_beam/runners/common.py", line 907, in apache_beam.runners.common.DoFnRunner.process
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 980, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise_with_traceback(new_exn)
  File "apache_beam/runners/common.py", line 905, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 508, in apache_beam.runners.common.SimpleInvoker.invoke_process
    windowed_value, self.process_method(windowed_value.value))
  File "apache_beam/transforms/core.py", line 1455, in <lambda>
    wrapper = lambda x: [fn(x)]
  File "<https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py>", line 84, in format_record
NameError: global name 'base64' is not defined [while running 'Format']

Note: imports, functions and other variables defined in the global context of 
your __main__ file of your Dataflow pipeline are, by default, not available in 
the worker execution environment, and such references will cause a NameError, 
unless the --save_main_session pipeline option is set to True. Please see 
https://cloud.google.com/dataflow/faq#how-do-i-handle-nameerrors for additional 
documentation on configuring your worker execution environment.
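
For reference, a minimal sketch of the two usual remedies the note above describes. This is illustrative only: the failing format_record in bigquery_write_perf_test.py is truncated out of this log, so the function body and option wiring below are assumptions, not the test's actual code.

import base64  # remedy 1 relies on this module-level import being pickled

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

# Remedy 1: pickle the __main__ session so module-level imports such as
# base64 are restored on Dataflow workers (equivalent to passing
# --save_main_session on the command line).
options = PipelineOptions(save_main_session=True)

# Remedy 2: import inside the function, so the name resolves locally on
# the worker regardless of pipeline options.
def format_record(record):  # hypothetical stand-in for the test's function
    import base64
    return {'data': base64.b64encode(record)}

with beam.Pipeline(options=options) as p:
    _ = (p
         | beam.Create([b'example-row'])
         | 'Format' >> beam.Map(format_record))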
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:48.556Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  [full worker traceback, ending in the same NameError: global name 'base64' is not defined [while running 'Format'] and the same --save_main_session note as above]
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:48.725Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  [identical traceback and note elided]
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:48.838Z: JOB_MESSAGE_ERROR: Traceback (most recent call last):
  [identical traceback and note elided]
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:48.866Z: JOB_MESSAGE_BASIC: Executing BigQuery import job "dataflow_job_18217222878073182164". You can check its status with the bq tool: "bq show -j --project_id=apache-beam-testing dataflow_job_18217222878073182164".
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:50.789Z: JOB_MESSAGE_BASIC: Finished operation Produce rows+Count messages+Format+Measure time+Write to BigQuery/WriteToBigQuery/NativeWrite
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:50.861Z: JOB_MESSAGE_DEBUG: Executing failure step failure2
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:50.897Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: S01:Produce rows+Count messages+Format+Measure time+Write to BigQuery/WriteToBigQuery/NativeWrite failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers: 
  performance-tests-bqio-wr-02100753-2bdt-harness-4rxt
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02100753-2bdt-harness-4rxt
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02100753-2bdt-harness-bqpv
      Root cause: Work item failed.,
  performance-tests-bqio-wr-02100753-2bdt-harness-bqpv
      Root cause: Work item failed.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:51.029Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:51.087Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:57:51.122Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:59:15.579Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 5 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:59:15.617Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-10T15:59:15.651Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job 2020-02-10_07_53_20-10582245658953125328 is in state JOB_STATE_FAILED
INFO:apache_beam.io.gcp.tests.utils:Clean up a BigQuery table with project: apache-beam-testing, dataset: beam_performance, table: bqio_write_10GB.
Traceback (most recent call last):
  File "/usr/lib/python2.7/runpy.py", line 174, in _run_module_as_main
    "__main__", fname, loader, pkg_name)
  File "/usr/lib/python2.7/runpy.py", line 72, in _run_code
    exec code in run_globals
  File 
"<https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py";,>
 line 108, in <module>
    BigQueryWritePerfTest().run()
  File "apache_beam/testing/load_tests/load_test.py", line 108, in run
    self.cleanup()
  File 
"<https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/io/gcp/bigquery_write_perf_test.py";,>
 line 103, in cleanup
    self.project_id, self.output_dataset, self.output_table)
  File "apache_beam/utils/retry.py", line 234, in wrapper
    return fun(*args, **kwargs)
  File "apache_beam/io/gcp/tests/utils.py", line 106, in delete_bq_table
    raise GcpTestIOError('BigQuery table does not exist: %s' % table_ref)
apache_beam.io.gcp.tests.utils.GcpTestIOError: BigQuery table does not exist: 
TableReference(DatasetReference(u'apache-beam-testing', 'beam_performance'), 
'bqio_write_10GB')
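
This GcpTestIOError is secondary fallout: the write pipeline failed before the BigQuery load job ever created bqio_write_10GB, so the test's teardown found nothing to delete and raised, masking the real failure above. A minimal sketch of a teardown that tolerates a missing table, assuming a reasonably recent google-cloud-bigquery client (the helper name is made up for illustration):

from google.cloud import bigquery

def delete_table_if_exists(project_id, dataset_id, table_id):
    client = bigquery.Client(project=project_id)
    # not_found_ok=True turns deletion into a no-op when the pipeline
    # failed before the table was created, instead of raising.
    client.delete_table(
        '%s.%s.%s' % (project_id, dataset_id, table_id),
        not_found_ok=True)

delete_table_if_exists('apache-beam-testing', 'beam_performance', 'bqio_write_10GB')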

> Task :sdks:python:apache_beam:testing:load_tests:run FAILED

FAILURE: Build failed with an exception.

* Where:
Build file '<https://builds.apache.org/job/beam_BiqQueryIO_Write_Performance_Test_Python_Batch/ws/src/sdks/python/apache_beam/testing/load_tests/build.gradle>' line: 51

* What went wrong:
Execution failed for task ':sdks:python:apache_beam:testing:load_tests:run'.
> error occurred

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug 
option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with 
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See 
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 7m 8s
5 actionable tasks: 5 executed

Publishing build scan...
https://gradle.com/s/ajxfxslu2ns6q

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
