See
<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/93/display/redirect>
Changes:
------------------------------------------
[...truncated 5.55 MB...]
"name": "s1",
"properties": {
"display_data": [
{
"key": "source",
"label": "Read Source",
"namespace": "apache_beam.io.iobase.Read",
"shortValue": "_PubSubSource",
"type": "STRING",
"value": "apache_beam.io.gcp.pubsub._PubSubSource"
},
{
"key": "with_attributes",
"label": "With Attributes",
"namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
"type": "BOOLEAN",
"value": false
},
{
"key": "subscription",
"label": "Pubsub Subscription",
"namespace": "apache_beam.io.gcp.pubsub._PubSubSource",
"type": "STRING",
"value":
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_input2cf57d51-a048-4687-8306-bc816a0b4860"
}
],
"format": "pubsub",
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "ReadFromPubSub/Read.out"
}
],
"pubsub_subscription":
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_input2cf57d51-a048-4687-8306-bc816a0b4860",
"user_name": "ReadFromPubSub/Read"
}
},
{
"kind": "ParallelDo",
"name": "s2",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "StreamingUserMetricsDoFn",
"type": "STRING",
"value":
"apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "generate_metrics.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s1"
},
"serialized_fn": "ref_AppliedPTransform_generate_metrics_4",
"user_name": "generate_metrics"
}
},
{
"kind": "ParallelWrite",
"name": "s3",
"properties": {
"display_data": [],
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"format": "pubsub",
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s2"
},
"pubsub_topic":
"projects/apache-beam-testing/topics/exercise_streaming_metrics_topic_output2cf57d51-a048-4687-8306-bc816a0b4860",
"user_name": "dump_to_pub/Write/NativeWrite"
}
}
],
"type": "JOB_TYPE_STREAMING"
}
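(For orientation, steps s1-s3 above describe a three-stage streaming pipeline: read from a Pub/Sub subscription, apply a metrics-producing ParDo, and write back to a Pub/Sub topic. A minimal sketch of that shape follows; it is reconstructed from the job graph, not the verbatim source of dataflow_exercise_streaming_metrics_pipeline, the DoFn body is an assumption, and the randomized resource suffixes are elided.)

    # Sketch of the pipeline shape implied by steps s1-s3 (reconstructed
    # from the job graph above; not the verbatim test source).
    import apache_beam as beam
    from apache_beam.metrics.metric import Metrics
    from apache_beam.options.pipeline_options import PipelineOptions

    class StreamingUserMetricsDoFn(beam.DoFn):
        """Stand-in for the DoFn named in step s2; assumed to bump a user counter."""

        def __init__(self):
            self.element_counter = Metrics.counter(self.__class__, 'element_count')

        def process(self, element):
            self.element_counter.inc()
            yield element

    def run(argv=None):
        options = PipelineOptions(argv, streaming=True)
        with beam.Pipeline(options=options) as p:
            _ = (p
                 | 'ReadFromPubSub' >> beam.io.ReadFromPubSub(
                     subscription='projects/apache-beam-testing/subscriptions/'
                                  'exercise_streaming_metrics_subscription_input...')
                 | 'generate_metrics' >> beam.ParDo(StreamingUserMetricsDoFn())
                 | 'dump_to_pub' >> beam.io.WriteToPubSub(
                     topic='projects/apache-beam-testing/topics/'
                           'exercise_streaming_metrics_topic_output...'))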
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: u'2020-03-11T06:46:21.550668Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2020-03-10_23_46_20-2930105613095516448'
location: u'us-central1'
name: u'beamapp-jenkins-0311064559-808493'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2020-03-11T06:46:21.550668Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2020-03-10_23_46_20-2930105613095516448]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_20-2930105613095516448?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-03-10_23_46_20-2930105613095516448 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:20.573Z: JOB_MESSAGE_DETAILED: Autoscaling is enabled for job 2020-03-10_23_46_20-2930105613095516448. The number of workers will be between 1 and 100.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:20.573Z: JOB_MESSAGE_WARNING: Autoscaling is enabled for Dataflow Streaming Engine. Workers will scale between 1 and 100 unless maxNumWorkers is specified.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:20.573Z: JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job 2020-03-10_23_46_20-2930105613095516448.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:23.568Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:27.386Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-2 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.084Z: JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.403Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.577Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.668Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.729Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.803Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:28.858Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.065Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.113Z: JOB_MESSAGE_DETAILED: Fusing consumer generate_metrics into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.139Z: JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/Write/NativeWrite into generate_metrics
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.191Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.231Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.258Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:29.280Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:31.689Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:31.789Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:31.843Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:50.513Z: JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric descriptors and Stackdriver will not create new Dataflow custom metrics for this job. Each unique user-defined metric name (independent of the DoFn in which it is defined) produces a new metric descriptor. To delete old / unused metric descriptors see https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
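(The metric-descriptor warning above links to the Monitoring v3 list/delete methods. A hedged cleanup sketch using those same methods through google-api-python-client follows; the metric.type filter is an assumption about how the Dataflow-created descriptors are named, so verify the listed names before enabling the delete call.)

    # Hedged sketch: enumerate (and optionally delete) old Dataflow-created
    # custom metric descriptors via the Monitoring v3 methods cited above.
    from googleapiclient.discovery import build

    monitoring = build('monitoring', 'v3')
    project = 'projects/apache-beam-testing'

    request = monitoring.projects().metricDescriptors().list(
        name=project,
        # Assumed naming scheme for Dataflow custom metrics; confirm first.
        filter='metric.type = starts_with("custom.googleapis.com/dataflow")')
    while request is not None:
        response = request.execute()
        for descriptor in response.get('metricDescriptors', []):
            print('candidate for deletion: %s' % descriptor['name'])
            # monitoring.projects().metricDescriptors().delete(
            #     name=descriptor['name']).execute()
        request = monitoring.projects().metricDescriptors().list_next(
            request, response)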
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:46:55.522Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 so that the pipeline can catch up with its backlog and keep up with its input rate.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:47:31.062Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-03-11T06:47:31.102Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting for job 2020-03-10_23_46_20-2930105613095516448 after 60 seconds
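(The WARNING just above is the test harness giving up its bounded wait on the streaming job, not the job itself failing. In the Python SDK, wait_until_finish accepts an optional duration in milliseconds and returns once it elapses; a rough sketch of that pattern, with `pipeline` standing in for the test pipeline:)

    # Rough sketch of the bounded wait that produces the
    # "Timing out on waiting for job ... after 60 seconds" warning.
    result = pipeline.run()  # 'pipeline' is hypothetical here
    result.wait_until_finish(duration=60 * 1000)  # duration is in milliseconds
    # A streaming job keeps running after the wait times out, so the
    # caller has to cancel it explicitly once verification is done.
    result.cancel()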
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/[email protected]/token HTTP/1.1" 200 192
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 28 tests in 2301.524s
FAILED (failures=1)
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_20-653244888607751791?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_55_42-7072372997108744723?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_05_32-3872878315275776623?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_15_36-17417928316221147357?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_20-8161987173511063587?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_55_39-6829195721151448356?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_04_35-8557083821282384483?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_20-2930105613095516448?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_54_35-515864849536081738?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_03_39-3557407317158213682?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_22-6723745013299273971?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_56_10-2288101927703439207?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_05_20-5286674956637785164?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_22-3302899930967398429?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_54_42-16879001192272881705?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_03_11-8534086246161143132?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_19-4341939464080592793?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_55_36-16712156770722682461?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_04_34-8113416113842678237?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_22-14472719127165775960?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_56_18-5387627293502040564?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-11_00_04_44-6581323231224847307?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_46_24-850421407842126592?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-03-10_23_56_09-4370821474270015367?project=apache-beam-testing
> Task :sdks:python:test-suites:dataflow:py2:validatesRunnerStreamingTests FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle>' line: 113
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:validatesRunnerBatchTests'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/test-suites/dataflow/py2/build.gradle>' line: 142
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:validatesRunnerStreamingTests'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 20m 7s
64 actionable tasks: 46 executed, 18 from cache
Publishing build scan...
https://gradle.com/s/yabu3khleneda
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]