See
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/1347/display/redirect>
Changes:
------------------------------------------
[...truncated 728.17 KB...]
self =
<apache_beam.transforms.validate_runner_xlang_test.ValidateRunnerXlangTest
testMethod=test_prefix>
test_pipeline = None
@pytest.mark.xlang_transforms
def test_prefix(self, test_pipeline=None):
CrossLanguageTestPipelines().run_prefix(
> test_pipeline or self.create_pipeline())
apache_beam/transforms/validate_runner_xlang_test.py:254:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/transforms/validate_runner_xlang_test.py:99: in run_prefix
assert_that(res, equal_to(['0a', '0b']))
apache_beam/pipeline.py:596: in __exit__
self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
return self.runner.run_pipeline(self, self._options)
apache_beam/runners/dataflow/test_dataflow_runner.py:64: in run_pipeline
self.result.wait_until_finish(duration=wait_duration)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <DataflowPipelineResult <Job
clientRequestId: '20211022122056190981-3474'
createTime: '2021-10-22T12:21:01.981955Z'
...021-10-22T12:21:01.981955Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)> at 0x7f3879d15668>
duration = None
def wait_until_finish(self, duration=None):
if not self.is_in_terminal_state():
if not self.has_job:
raise IOError('Failed to get the Dataflow job id.')
thread = threading.Thread(
target=DataflowRunner.poll_for_job_completion,
args=(self._runner, self, duration))
# Mark the thread as a daemon thread so a keyboard interrupt on the main
# thread will terminate everything. This is also the reason we will not
# use thread.join() to wait for the polling thread.
thread.daemon = True
thread.start()
while thread.is_alive():
time.sleep(5.0)
# TODO: Merge the termination code in poll_for_job_completion and
# is_in_terminal_state.
terminated = self.is_in_terminal_state()
assert duration or terminated, (
'Job did not reach to a terminal state after waiting indefinitely.')
if terminated and self.state != PipelineState.DONE:
# TODO(BEAM-1290): Consider converting this to an error log based on
# the resolution of the issue.
raise DataflowRuntimeException(
'Dataflow pipeline failed. State: %s, Error:\n%s' %
(self.state, getattr(self._runner, 'last_error_msg', None)),
> self)
E
apache_beam.runners.dataflow.dataflow_runner.DataflowRuntimeException: Dataflow
pipeline failed. State: FAILED, Error:
E Workflow failed. Causes: Job appears to be stuck. Several workers
have failed to start up in a row, and no worker has successfully started up for
this job. Last error reported: Unable to pull container image due to error:
image pull request failed with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image..
apache_beam/runners/dataflow/dataflow_runner.py:1643: DataflowRuntimeException
------------------------------ Captured log call -------------------------------
INFO apache_beam.runners.portability.stager:stager.py:303 Copying Beam SDK
"<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/ws/src/sdks/python/build/apache-beam.tar.gz">
to staging location.
WARNING root:environments.py:374 Make sure that locally built Python SDK
docker image has Python 3.6 interpreter.
INFO root:environments.py:380 Default Python SDK image for environment is
apache/beam_python3.6_sdk:2.35.0.dev
INFO root:environments.py:296 Using provided Python SDK container image:
gcr.io/cloud-dataflow/v1beta3/python36-fnapi:beam-master-20211015
INFO root:environments.py:304 Python SDK container image set to
"gcr.io/cloud-dataflow/v1beta3/python36-fnapi:beam-master-20211015" for Docker
environment
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function pack_combiners at 0x7f387c039598>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function sort_stages at 0x7f387c039d08>
====================
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:454
Defaulting to the temp_location as staging_location:
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/icedtea-sound-Bdoi2wYa757-fzq5vconCy4SSQ22ZaOq7yuC98fKPs8.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/icedtea-sound-Bdoi2wYa757-fzq5vconCy4SSQ22ZaOq7yuC98fKPs8.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/jaccess-CMbK-IOdQPLKHEqCuDnE-yBk-VpbtVT-hgjbHRUGO78.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/jaccess-CMbK-IOdQPLKHEqCuDnE-yBk-VpbtVT-hgjbHRUGO78.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/localedata-ae5Z0L6ak4922fztWeWy7ajiWXdG3ubNrwerJRFoFj0.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/localedata-ae5Z0L6ak4922fztWeWy7ajiWXdG3ubNrwerJRFoFj0.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/nashorn-XHtz_UehGpYcLTOrATrTnMNVUgEVa_ttoWkPxnVfqTo.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/nashorn-XHtz_UehGpYcLTOrATrTnMNVUgEVa_ttoWkPxnVfqTo.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/cldrdata-k07I6K9W3X5KTQbcDIEsqM0LXyM18f0eR6IaJw-P_xk.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/cldrdata-k07I6K9W3X5KTQbcDIEsqM0LXyM18f0eR6IaJw-P_xk.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/dnsns-RGhCDg3GVOQVC2r6ka2N0hmI4eqQH6VobuoAnQ74MnE.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/dnsns-RGhCDg3GVOQVC2r6ka2N0hmI4eqQH6VobuoAnQ74MnE.jar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/beam-sdks-java-testing-expansion-service-testExpansionService-2.35.0-SNAPSHOT-chW-Opb9iI4d_uet_1t9qGaU5878hJpS-348YJlZVEo.jar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/beam-sdks-java-testing-expansion-service-testExpansionService-2.35.0-SNAPSHOT-chW-Opb9iI4d_uet_1t9qGaU5878hJpS-348YJlZVEo.jar
in 4 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/dataflow_python_sdk.tar...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/dataflow_python_sdk.tar
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:638
Starting GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/pipeline.pb...
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:657
Completed GCS upload to
gs://dataflow-staging-us-central1-77b801c0838aee13391c0d1885860494/beamapp-jenkins-1022122056-189594.1634905256.190092/pipeline.pb
in 0 seconds.
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:818
Create job: <Job
clientRequestId: '20211022122056190981-3474'
createTime: '2021-10-22T12:21:01.981955Z'
currentStateTime: '1970-01-01T00:00:00Z'
id:
'2021-10-22_05_21_01-3974354247118139594'
location: 'us-central1'
name: 'beamapp-jenkins-1022122056-189594'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2021-10-22T12:21:01.981955Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:820
Created job with id: [2021-10-22_05_21_01-3974354247118139594]
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:821
Submitted job: 2021-10-22_05_21_01-3974354247118139594
INFO apache_beam.runners.dataflow.internal.apiclient:apiclient.py:827 To
access the Dataflow monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2021-10-22_05_21_01-3974354247118139594?project=apache-beam-testing
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:191
Job 2021-10-22_05_21_01-3974354247118139594 is in state JOB_STATE_RUNNING
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:04.684Z: JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2021-10-22_05_21_01-3974354247118139594. The number of workers will be between
1 and 1000.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:04.730Z: JOB_MESSAGE_DETAILED: Autoscaling was automatically
enabled for job 2021-10-22_05_21_01-3974354247118139594.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:06.581Z: JOB_MESSAGE_BASIC: Worker configuration:
e2-standard-2 in us-central1-b.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.387Z: JOB_MESSAGE_DETAILED: Expanding SplittableParDo
operations into optimizable parts.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.421Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton
operations into optimizable parts.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.506Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey
operations into optimizable parts.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.534Z: JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
assert_that/Group/CoGroupByKeyImpl/GroupByKey: GroupByKey not followed by a
combiner.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.557Z: JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey: GroupByKey not
followed by a combiner.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.584Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations
into optimizable parts.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.611Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner
information.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.656Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read,
Write, and Flatten operations
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.689Z: JOB_MESSAGE_DEBUG: Inserted coder converter before
flatten ref_AppliedPTransform_assert_that-Group-CoGroupByKeyImpl-Flatten_27
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.713Z: JOB_MESSAGE_DEBUG: Inserted coder converter before
flatten ref_AppliedPTransform_assert_that-Group-CoGroupByKeyImpl-Flatten_27
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.733Z: JOB_MESSAGE_DETAILED: Unzipping flatten
ref_AppliedPTransform_assert_that-Group-CoGroupByKeyImpl-Flatten_27 for input
ref_AppliedPTransform_assert_that-Group-CoGroupByKeyImpl-Tag-0-_25.None-post14
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.759Z: JOB_MESSAGE_DETAILED: Fusing unzipped copy of
assert_that/Group/CoGroupByKeyImpl/GroupByKey/Write, through flatten
assert_that/Group/CoGroupByKeyImpl/Flatten, into producer
assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.779Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/MapTuple(collect_values) into
assert_that/Group/CoGroupByKeyImpl/GroupByKey/Read
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.809Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/RestoreTags into
assert_that/Group/CoGroupByKeyImpl/MapTuple(collect_values)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.845Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Unkey into assert_that/Group/RestoreTags
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.887Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Match into assert_that/Unkey
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.921Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/GroupByKey/Write into
assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.957Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/FlatMap(<lambda at core.py:3222>) into Create/Impulse
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:07.986Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/AddRandomKeys into Create/FlatMap(<lambda at
core.py:3222>)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.041Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/Map(reify_timestamps) into
Create/MaybeReshuffle/Reshuffle/AddRandomKeys
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.074Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Reify into
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/Map(reify_timestamps)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.109Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Write into
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Reify
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.134Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/GroupByWindow into
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Read
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.162Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/FlatMap(restore_timestamps)
into Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/GroupByWindow
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.198Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/MaybeReshuffle/Reshuffle/RemoveRandomKeys into
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/FlatMap(restore_timestamps)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.230Z: JOB_MESSAGE_DETAILED: Fusing consumer
Create/Map(decode) into Create/MaybeReshuffle/Reshuffle/RemoveRandomKeys
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.258Z: JOB_MESSAGE_DETAILED: Fusing consumer
ExternalTransform(beam:transforms:xlang:test:prefix)/Map/ParMultiDo(Anonymous)
into Create/Map(decode)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.290Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/WindowInto(WindowIntoFn) into
ExternalTransform(beam:transforms:xlang:test:prefix)/Map/ParMultiDo(Anonymous)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.321Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Create/FlatMap(<lambda at core.py:3222>) into
assert_that/Create/Impulse
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.353Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Create/Map(decode) into assert_that/Create/FlatMap(<lambda at
core.py:3222>)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.388Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/Tag[0] into assert_that/Create/Map(decode)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.414Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity into
assert_that/Group/CoGroupByKeyImpl/Tag[0]
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.447Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/ToVoidKey into assert_that/WindowInto(WindowIntoFn)
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.501Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/Tag[1] into assert_that/ToVoidKey
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.524Z: JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity into
assert_that/Group/CoGroupByKeyImpl/Tag[1]
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.550Z: JOB_MESSAGE_DEBUG: Workflow config is missing a
default resource spec.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.585Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and
teardown to workflow graph.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.621Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop
steps.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.660Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.779Z: JOB_MESSAGE_DEBUG: Executing wait step start26
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.831Z: JOB_MESSAGE_BASIC: Executing operation
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Create
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.856Z: JOB_MESSAGE_BASIC: Executing operation
assert_that/Group/CoGroupByKeyImpl/GroupByKey/Create
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.869Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.903Z: JOB_MESSAGE_BASIC: Starting 1 workers in
us-central1-b...
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.954Z: JOB_MESSAGE_BASIC: Finished operation
Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Create
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:08.967Z: JOB_MESSAGE_BASIC: Finished operation
assert_that/Group/CoGroupByKeyImpl/GroupByKey/Create
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:09.003Z: JOB_MESSAGE_DEBUG: Value
"Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Session"
materialized.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:09.026Z: JOB_MESSAGE_DEBUG: Value
"assert_that/Group/CoGroupByKeyImpl/GroupByKey/Session" materialized.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:09.060Z: JOB_MESSAGE_BASIC: Executing operation
Create/Impulse+Create/FlatMap(<lambda at
core.py:3222>)+Create/MaybeReshuffle/Reshuffle/AddRandomKeys+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/Map(reify_timestamps)+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Reify+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Write
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:09.093Z: JOB_MESSAGE_BASIC: Executing operation
assert_that/Create/Impulse+assert_that/Create/FlatMap(<lambda at
core.py:3222>)+assert_that/Create/Map(decode)+assert_that/Group/CoGroupByKeyImpl/Tag[0]+assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity+assert_that/Group/CoGroupByKeyImpl/GroupByKey/Write
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:33.655Z: JOB_MESSAGE_BASIC: Your project already contains 100
Dataflow-created metric descriptors, so new user metrics of the form
custom.googleapis.com/* will not be created. However, all user metrics are also
available in the metric dataflow.googleapis.com/job/user_counter. If you rely
on the custom metrics, you can delete old / unused metric descriptors. See
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:21:43.789Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number
of workers to 1 based on the rate of progress in the currently running stage(s).
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:22:34.338Z: JOB_MESSAGE_WARNING: A worker was unable to start up.
Error: Unable to pull container image due to error: image pull request failed
with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:23:01.913Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number
of workers to 1 based on the rate of progress in the currently running stage(s).
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:23:44.459Z: JOB_MESSAGE_WARNING: A worker was unable to start up.
Error: Unable to pull container image due to error: image pull request failed
with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:24:11.958Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number
of workers to 1 based on the rate of progress in the currently running stage(s).
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:24:53.958Z: JOB_MESSAGE_WARNING: A worker was unable to start up.
Error: Unable to pull container image due to error: image pull request failed
with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:25:20.142Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number
of workers to 1 based on the rate of progress in the currently running stage(s).
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:26:02.952Z: JOB_MESSAGE_WARNING: A worker was unable to start up.
Error: Unable to pull container image due to error: image pull request failed
with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:26:29.661Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number
of workers to 1 based on the rate of progress in the currently running stage(s).
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.695Z: JOB_MESSAGE_WARNING: A worker was unable to start up.
Error: Unable to pull container image due to error: image pull request failed
with error: Error response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.727Z: JOB_MESSAGE_ERROR: Workflow failed. Causes: Job
appears to be stuck. Several workers have failed to start up in a row, and no
worker has successfully started up for this job. Last error reported: Unable to
pull container image due to error: image pull request failed with error: Error
response from daemon: manifest for
us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113 not found:
manifest unknown: Failed to fetch "20211022120113" from request
"/v2/apache-beam-testing/java-postcommit-it/java/manifests/20211022120113"..
This is likely due to an invalid SDK container image URL. Please verify any
provided SDK container image is valid and that Dataflow workers have
permissions to pull image..
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.794Z: JOB_MESSAGE_BASIC: Finished operation
Create/Impulse+Create/FlatMap(<lambda at
core.py:3222>)+Create/MaybeReshuffle/Reshuffle/AddRandomKeys+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/Map(reify_timestamps)+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Reify+Create/MaybeReshuffle/Reshuffle/ReshufflePerKey/GroupByKey/Write
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.794Z: JOB_MESSAGE_BASIC: Finished operation
assert_that/Create/Impulse+assert_that/Create/FlatMap(<lambda at
core.py:3222>)+assert_that/Create/Map(decode)+assert_that/Group/CoGroupByKeyImpl/Tag[0]+assert_that/Group/CoGroupByKeyImpl/Flatten/InputIdentity+assert_that/Group/CoGroupByKeyImpl/GroupByKey/Write
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.866Z: JOB_MESSAGE_DETAILED: Cleaning up.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.938Z: JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:11.960Z: JOB_MESSAGE_BASIC: Stopping worker pool...
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:12.342Z: JOB_MESSAGE_DETAILED: Autoscaling: Resized worker
pool from 1 to 0.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:59.303Z: JOB_MESSAGE_BASIC: Worker pool stopped.
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:236
2021-10-22T12:27:59.333Z: JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO apache_beam.runners.dataflow.dataflow_runner:dataflow_runner.py:191
Job 2021-10-22_05_21_01-3974354247118139594 is in state JOB_STATE_FAILED
=============================== warnings summary ===============================
apache_beam/io/filesystems_test.py:54
apache_beam/io/filesystems_test.py:54
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:54:
DeprecationWarning: invalid escape sequence \c
self.assertIsNone(FileSystems.get_scheme('c:\\abc\cdf')) # pylint:
disable=anomalous-backslash-in-string
apache_beam/io/filesystems_test.py:62
apache_beam/io/filesystems_test.py:62
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:62:
DeprecationWarning: invalid escape sequence \d
self.assertTrue(isinstance(FileSystems.get_filesystem('c:\\abc\def'), #
pylint: disable=anomalous-backslash-in-string
-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file:
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/ws/src/sdks/python/pytest_xlangValidateRunner.xml>
-
=============== 9 failed, 1 passed, 4 warnings in 972.24 seconds ===============
> Task
> :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerPythonUsingJava
> FAILED
> Task :runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerCleanup
Stopping expansion service pid: 10933.
Stopping expansion service pid: 10936.
> Task :runners:google-cloud-dataflow-java:cleanUpDockerImages FAILED
Untagged: us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113
Untagged:
us.gcr.io/apache-beam-testing/java-postcommit-it/java@sha256:5976ba21d43c835ecaa869e505071ba1a5ebfc844e6a8a6e897e7f4f1f6cf717
ERROR: (gcloud.container.images.untag) Image could not be found:
[us.gcr.io/apache-beam-testing/java-postcommit-it/java:20211022120113]
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task
':runners:google-cloud-dataflow-java:validatesCrossLanguageRunnerPythonUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Build file
'<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Dataflow/ws/src/runners/google-cloud-dataflow-java/build.gradle'>
line: 279
* What went wrong:
Execution failed for task
':runners:google-cloud-dataflow-java:cleanUpDockerImages'.
> Process 'command 'gcloud'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/6.9.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 28m 9s
132 actionable tasks: 94 executed, 34 from cache, 4 up-to-date
Publishing build scan...
https://gradle.com/s/rpummak6puobc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]