See
<https://ci-beam.apache.org/job/beam_PreCommit_Python_Cron/4196/display/redirect?page=changes>
Changes:
[noreply] Update release notes for 2.29.0
------------------------------------------
[...truncated 1.81 MB...]
"user_name": "format"
}
},
{
"kind": "ParallelDo",
"name": "s9",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "<lambda>"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:interval_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "encode.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s8"
},
"serialized_fn": "ref_AppliedPTransform_encode_11",
"user_name": "encode"
}
},
{
"kind": "ParallelDo",
"name": "s10",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "bytes_to_proto_str"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:interval_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "WriteToPubSub/ToProtobuf.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s9"
},
"serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_13",
"user_name": "WriteToPubSub/ToProtobuf"
}
},
{
"kind": "ParallelWrite",
"name": "s11",
"properties": {
"display_data": [],
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"format": "pubsub",
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s10"
},
"pubsub_serialized_attributes_fn": "",
"pubsub_topic":
"projects/apache-beam-testing/topics/wc_topic_outputc1c361c0-636f-40c2-981d-9b124c0efe8d",
"user_name": "WriteToPubSub/Write/NativeWrite"
}
}
],
"type": "JOB_TYPE_STREAMING"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: '2021-05-12T18:33:50.097970Z'
currentStateTime: '1970-01-01T00:00:00Z'
id: '2021-05-12_11_33_49-6840482890592786530'
location: 'us-central1'
name: 'beamapp-jenkins-0512183338-398797'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2021-05-12T18:33:50.097970Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id:
[2021-05-12_11_33_49-6840482890592786530]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job:
2021-05-12_11_33_49-6840482890592786530
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2021-05-12_11_33_49-6840482890592786530?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job
2021-05-12_11_33_49-6840482890592786530 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:52.257Z:
JOB_MESSAGE_WARNING: Autoscaling is enabled for Dataflow Streaming Engine.
Workers will scale between 1 and 100 unless maxNumWorkers is specified.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:52.347Z:
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2021-05-12_11_33_49-6840482890592786530. The number of workers will be between
1 and 100.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:52.384Z:
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job
2021-05-12_11_33_49-6840482890592786530.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:54.558Z:
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-2 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.212Z:
JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable
parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.271Z:
JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.361Z:
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.397Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step group: GroupByKey not
followed by a combiner.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.463Z:
JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into
optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.496Z:
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write
steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.548Z:
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into
MergeBucketsMappingFns
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.613Z:
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.653Z:
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.690Z:
JOB_MESSAGE_DETAILED: Fusing consumer decode into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.723Z:
JOB_MESSAGE_DETAILED: Fusing consumer split into decode
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.756Z:
JOB_MESSAGE_DETAILED: Fusing consumer pair_with_one into split
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.801Z:
JOB_MESSAGE_DETAILED: Fusing consumer WindowInto(WindowIntoFn) into
pair_with_one
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.836Z:
JOB_MESSAGE_DETAILED: Fusing consumer group/WriteStream into
WindowInto(WindowIntoFn)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.867Z:
JOB_MESSAGE_DETAILED: Fusing consumer group/MergeBuckets into group/ReadStream
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.907Z:
JOB_MESSAGE_DETAILED: Fusing consumer count into group/MergeBuckets
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.944Z:
JOB_MESSAGE_DETAILED: Fusing consumer format into count
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:55.979Z:
JOB_MESSAGE_DETAILED: Fusing consumer encode into format
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.013Z:
JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/ToProtobuf into encode
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.052Z:
JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/Write/NativeWrite into
WriteToPubSub/ToProtobuf
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.092Z:
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.121Z:
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.156Z:
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.187Z:
JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.251Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.280Z:
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:33:56.312Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:34:27.518Z:
JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric
descriptors, so new user metrics of the form custom.googleapis.com/* will not
be created. However, all user metrics are also available in the metric
dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics,
you can delete old / unused metric descriptors. See
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:34:38.910Z:
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 so that
the pipeline can catch up with its backlog and keep up with its input rate.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:35:15.356Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-05-12T18:35:15.396Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting
for job 2021-05-12_11_33_49-6840482890592786530 after 361 seconds
google.auth._default: DEBUG: Checking None for explicit credentials as part of
auth process...
google.auth._default: DEBUG: Checking Cloud SDK credentials as part of auth
process...
google.auth._default: DEBUG: Cloud SDK credentials not found on disk; not using
them
google.auth._default: DEBUG: Checking for App Engine runtime as part of auth
process...
google.auth._default: DEBUG: No App Engine library was found so cannot
authenticate via App Engine Identity Credentials.
google.auth.transport._http_client: DEBUG: Making request: GET
http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1):
metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET
/computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1"
200 144
google.auth.transport.requests: DEBUG: Making request: GET
http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET
/computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub
HTTP/1.1" 200 244
--------------------- >> end captured logging << ---------------------
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2021-05-12_11_33_49-6840482890592786530?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-preCommitIT-df-py36.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PreCommit_Python_Cron/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 1148.727s
FAILED (errors=1)
> Task :sdks:python:test-suites:dataflow:py36:preCommitIT_streaming_V2 FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:test-suites:tox:py37:testPy37Cloud'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Script
'<https://ci-beam.apache.org/job/beam_PreCommit_Python_Cron/ws/src/sdks/python/test-suites/dataflow/common.gradle>'
line: 79
* What went wrong:
Execution failed for task
':sdks:python:test-suites:dataflow:py36:preCommitIT_streaming_V2'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 50m 14s
102 actionable tasks: 73 executed, 29 from cache
Publishing build scan...
https://gradle.com/s/q32zkrbw52tf4
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]