See
<https://ci-beam.apache.org/job/beam_PostCommit_Python2/2756/display/redirect?page=changes>
Changes:
[Robert Bradshaw] Simplify common patterns for pandas methods.
[Robert Bradshaw] Use new infrastructure to simplify pandas implementation.
------------------------------------------
[...truncated 17.36 MB...]
    self._reraise_augmented(exn)
  File "apache_beam/runners/common.py", line 1294, in apache_beam.runners.common.DoFnRunner._reraise_augmented
    raise_with_traceback(new_exn)
  File "apache_beam/runners/common.py", line 1213, in apache_beam.runners.common.DoFnRunner.process
    return self.do_fn_invoker.invoke_process(windowed_value)
  File "apache_beam/runners/common.py", line 570, in apache_beam.runners.common.SimpleInvoker.invoke_process
    windowed_value, self.process_method(windowed_value.value))
  File "/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py", line 405, in process
    self._flush_batch()
  File "/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py", line 422, in _flush_batch
    throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)
  File "/usr/local/lib/python2.7/site-packages/apache_beam/utils/retry.py", line 236, in wrapper
    return fun(*args, **kwargs)
  File "/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py", line 385, in write_mutations
    self._batch.commit()
  File "/usr/local/lib/python2.7/site-packages/google/cloud/datastore/batch.py", line 274, in commit
    self._commit()
  File "/usr/local/lib/python2.7/site-packages/google/cloud/datastore/batch.py", line 250, in _commit
    self.project, mode, self._mutations, transaction=self._id
  File "/usr/local/lib/python2.7/site-packages/google/cloud/datastore_v1/gapic/datastore_client.py", line 501, in commit
    request, retry=retry, timeout=timeout, metadata=metadata
  File "/usr/local/lib/python2.7/site-packages/google/api_core/gapic_v1/method.py", line 143, in __call__
    return wrapped_func(*args, **kwargs)
  File "/usr/local/lib/python2.7/site-packages/google/api_core/retry.py", line 286, in retry_wrapped_func
    on_error=on_error,
  File "/usr/local/lib/python2.7/site-packages/google/api_core/retry.py", line 184, in retry_target
    return target()
  File "/usr/local/lib/python2.7/site-packages/google/api_core/timeout.py", line 214, in func_with_timeout
    return func(*args, **kwargs)
  File "/usr/local/lib/python2.7/site-packages/google/api_core/grpc_helpers.py", line 59, in error_remapped_callable
    six.raise_from(exceptions.from_grpc_error(exc), exc)
  File "/usr/local/lib/python2.7/site-packages/six.py", line 738, in raise_from
    raise value
NotFound: 404 The project apache-beam-testing does not exist or it does not contain an active Cloud Datastore or Cloud Firestore database. Please visit http://console.cloud.google.com to create a project or https://console.cloud.google.com/datastore/setup?project=apache-beam-testing to add a Cloud Datastore or Cloud Firestore database. Note that Cloud Datastore or Cloud Firestore always have an associated App Engine app and this app must not be disabled. [while running 'Write to Datastore/Write Batch to Datastore']
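For context on the failing step named above ('Write to Datastore/Write Batch to Datastore'), the sketch below shows roughly what that stage of the post-commit pipeline looks like, assuming Beam's v1new Datastore API (which matches the datastoreio.py frames in the traceback). The kind name, element values, and step labels other than those quoted from the log are illustrative placeholders, not taken from this job.

    # Minimal sketch of the failing write stage (assumptions: Beam v1new Datastore API,
    # placeholder kind and input data).
    import apache_beam as beam
    from apache_beam.io.gcp.datastore.v1new.datastoreio import WriteToDatastore
    from apache_beam.io.gcp.datastore.v1new.types import Entity, Key

    PROJECT = 'apache-beam-testing'  # project named in the error above

    def to_entity(element):
        # 'To Entity' stage: wrap each string in a Datastore entity.
        key = Key(['ExampleKind', element], project=PROJECT)  # 'ExampleKind' is a placeholder
        entity = Entity(key)
        entity.set_properties({'value': element})
        return entity

    with beam.Pipeline() as p:
        (p
         | 'Input/Read' >> beam.Create(range(10))
         | 'To String' >> beam.Map(str)
         | 'To Entity' >> beam.Map(to_entity)
         | 'Write to Datastore' >> WriteToDatastore(PROJECT))

WriteToDatastore buffers entities and commits them in batches, which is the _flush_batch()/write_mutations() path that surfaces the NotFound error here.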
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.485Z:
JOB_MESSAGE_ERROR: Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/dataflow_worker/batchworker.py",
line 638, in do_work
work_executor.execute()
File "/usr/local/lib/python2.7/site-packages/dataflow_worker/executor.py",
line 179, in execute
op.start()
File "dataflow_worker/native_operations.py", line 38, in
dataflow_worker.native_operations.NativeReadOperation.start
def start(self):
File "dataflow_worker/native_operations.py", line 39, in
dataflow_worker.native_operations.NativeReadOperation.start
with self.scoped_start_state:
File "dataflow_worker/native_operations.py", line 44, in
dataflow_worker.native_operations.NativeReadOperation.start
with self.spec.source.reader() as reader:
File "dataflow_worker/native_operations.py", line 54, in
dataflow_worker.native_operations.NativeReadOperation.start
self.output(windowed_value)
File "apache_beam/runners/worker/operations.py", line 332, in
apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 1215, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1279, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 1213, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 569, in
apache_beam.runners.common.SimpleInvoker.invoke_process
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1371, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 1215, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1279, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 1213, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 569, in
apache_beam.runners.common.SimpleInvoker.invoke_process
self.output_processor.process_outputs(
File "apache_beam/runners/common.py", line 1371, in
apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 195, in
apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 670, in
apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 671, in
apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_runner.process(o)
File "apache_beam/runners/common.py", line 1215, in
apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 1294, in
apache_beam.runners.common.DoFnRunner._reraise_augmented
raise_with_traceback(new_exn)
File "apache_beam/runners/common.py", line 1213, in
apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 570, in
apache_beam.runners.common.SimpleInvoker.invoke_process
windowed_value, self.process_method(windowed_value.value))
File
"/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py",
line 405, in process
self._flush_batch()
File
"/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py",
line 422, in _flush_batch
throttle_delay=util.WRITE_BATCH_TARGET_LATENCY_MS // 1000)
File "/usr/local/lib/python2.7/site-packages/apache_beam/utils/retry.py",
line 236, in wrapper
return fun(*args, **kwargs)
File
"/usr/local/lib/python2.7/site-packages/apache_beam/io/gcp/datastore/v1new/datastoreio.py",
line 385, in write_mutations
self._batch.commit()
File
"/usr/local/lib/python2.7/site-packages/google/cloud/datastore/batch.py", line
274, in commit
self._commit()
File
"/usr/local/lib/python2.7/site-packages/google/cloud/datastore/batch.py", line
250, in _commit
self.project, mode, self._mutations, transaction=self._id
File
"/usr/local/lib/python2.7/site-packages/google/cloud/datastore_v1/gapic/datastore_client.py",
line 501, in commit
request, retry=retry, timeout=timeout, metadata=metadata
File
"/usr/local/lib/python2.7/site-packages/google/api_core/gapic_v1/method.py",
line 143, in __call__
return wrapped_func(*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/google/api_core/retry.py", line
286, in retry_wrapped_func
on_error=on_error,
File "/usr/local/lib/python2.7/site-packages/google/api_core/retry.py", line
184, in retry_target
return target()
File "/usr/local/lib/python2.7/site-packages/google/api_core/timeout.py",
line 214, in func_with_timeout
return func(*args, **kwargs)
File
"/usr/local/lib/python2.7/site-packages/google/api_core/grpc_helpers.py", line
59, in error_remapped_callable
six.raise_from(exceptions.from_grpc_error(exc), exc)
File "/usr/local/lib/python2.7/site-packages/six.py", line 738, in raise_from
raise value
NotFound: 404 The project apache-beam-testing does not exist or it does not
contain an active Cloud Datastore or Cloud Firestore database. Please visit
http://console.cloud.google.com to create a project or
https://console.cloud.google.com/datastore/setup?project=apache-beam-testing to
add a Cloud Datastore or Cloud Firestore database. Note that Cloud Datastore or
Cloud Firestore always have an associated App Engine app and this app must not
be disabled. [while running 'Write to Datastore/Write Batch to Datastore']
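The NotFound is raised by the google-cloud-datastore batch commit (the batch.py and datastore_client.py frames above), which points at the suggested remedy in the error text: the apache-beam-testing project's Datastore/Firestore database being missing or inactive, rather than a bug in the pipeline code. One rough way to verify that outside Beam is to drive the same commit path directly. The sketch below is an illustration under assumptions (google-cloud-datastore installed, default application credentials, a placeholder kind), not part of the test suite.

    # Rough standalone check of the commit path seen in the traceback above.
    from google.api_core import exceptions
    from google.cloud import datastore

    client = datastore.Client(project='apache-beam-testing')
    try:
        batch = client.batch()
        batch.begin()
        entity = datastore.Entity(key=client.key('DiagnosticKind', 'probe'))  # placeholder kind
        entity['value'] = 'ping'
        batch.put(entity)
        batch.commit()  # raises NotFound if the database does not exist or is inactive
        print('commit succeeded; Datastore database is active')
    except exceptions.NotFound as exc:
        print('Datastore database missing or inactive: %s' % exc)

If this commit also raises NotFound, the fix is on the project side (re-enable the database via the setup link in the error), not in the Beam job.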
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.519Z:
JOB_MESSAGE_BASIC: Finished operation Input/Read+To String+To Entity+Write to
Datastore/Write Batch to Datastore
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.613Z:
JOB_MESSAGE_DEBUG: Executing failure step failure2
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.648Z:
JOB_MESSAGE_ERROR: Workflow failed. Causes: S01:Input/Read+To String+To
Entity+Write to Datastore/Write Batch to Datastore failed., The job failed
because a work item has failed 4 times. Look in previous log entries for the
cause of each one of the 4 failures. For more information, see
https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was
attempted on these workers:
beamapp-jenkins-080600150-08051715-xmjm-harness-sw54
Root cause: Work item failed.,
beamapp-jenkins-080600150-08051715-xmjm-harness-sw54
Root cause: Work item failed.,
beamapp-jenkins-080600150-08051715-xmjm-harness-sw54
Root cause: Work item failed.,
beamapp-jenkins-080600150-08051715-xmjm-harness-sw54
Root cause: Work item failed.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.792Z:
JOB_MESSAGE_DETAILED: Cleaning up.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.849Z:
JOB_MESSAGE_DEBUG: Starting worker pool teardown.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:21:44.911Z:
JOB_MESSAGE_BASIC: Stopping worker pool...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:22:40.688Z:
JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:22:40.748Z:
JOB_MESSAGE_BASIC: Worker pool stopped.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-08-06T00:22:40.789Z:
JOB_MESSAGE_DEBUG: Tearing down pending resources...
apache_beam.runners.dataflow.dataflow_runner: INFO: Job
2020-08-05_17_15_18-18386733557370249895 is in state JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py27.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 64 tests in 3689.558s
FAILED (SKIP=7, errors=2)
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_38-10947139442459873162?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_11_27-14261238861616940815?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_19_30-15356733144930083892?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_27_37-17249029016674938492?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_35_37-9956106912298129902?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_43_09-6527066341955972249?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_49_55-4088709395794076468?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_57_20-16416553895878016755?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_45-17609862822102115556?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_17_21-8713001915879004314?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_24_43-8221838032221904138?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_31_48-10975587297045650817?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_39_21-3444208480565234265?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_46_27-8993236031228379371?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_41-14953185204710144561?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_23_56-352796265860370191?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_31_42-1770233378714075425?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_38_42-16728470029728880237?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_46_08-12522828650191253779?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_42-4901531456511914537?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_15_18-18386733557370249895?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_23_06-12094042145525236810?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_30_37-7260744794775205697?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_38_10-14100264253326043434?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_44_30-7753068911512759119?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_51_12-15960884607391269789?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_37-4661824961166752915?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_20_28-10023313393841901868?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_28_14-10440488243313553593?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_36_09-5882571111880907052?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_52_40-9598119953147839429?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_41-11329188851797487421?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_10_48-17148190037263459038?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_20_12-10712071310054175731?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_29_07-10412493654174725994?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_36_57-2288095126757243418?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_44_51-17091303725499482408?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_52_26-9366589477659381444?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_41-72887671385717940?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_11_28-17669998616614360152?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_19_27-15108081848163722354?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_27_24-2481677169498878731?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_34_18-13929356270879613605?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_41_24-2533322408092278145?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_48_34-96891565940781534?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_55_51-11117698496653717460?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_03_41-16518772809661200539?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_12_48-13866658553938828844?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_23_59-5251219797266839691?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_31_51-6991883791010698572?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_39_21-10354812830135417120?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-05_17_47_26-17448167249665943268?project=apache-beam-testing
> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* Where:
Build file '<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/direct/py2/build.gradle>' line: 50
* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py2:directRunnerIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 116
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 4m 13s
159 actionable tasks: 127 executed, 30 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/ufpfm2teubhn2
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure