See <https://builds.apache.org/job/beam_PostCommit_Python37/2189/display/redirect?page=changes>
Changes:
[github] [BEAM-9674] Don't specify selected fields when fetching BigQuery table
------------------------------------------
[...truncated 10.71 MB...]
"value": false
}
],
"format": "pubsub",
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "ReadFromPubSub/Read.out"
}
],
"pubsub_subscription":
"projects/apache-beam-testing/subscriptions/exercise_streaming_metrics_subscription_input416629f3-95e9-4d5d-9b11-4599a094ca5c",
"user_name": "ReadFromPubSub/Read"
}
},
{
"kind": "ParallelDo",
"name": "s2",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "StreamingUserMetricsDoFn",
"type": "STRING",
"value":
"apache_beam.runners.dataflow.dataflow_exercise_streaming_metrics_pipeline.StreamingUserMetricsDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "generate_metrics.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s1"
},
"serialized_fn": "ref_AppliedPTransform_generate_metrics_4",
"user_name": "generate_metrics"
}
},
{
"kind": "ParallelWrite",
"name": "s3",
"properties": {
"display_data": [],
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"format": "pubsub",
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s2"
},
"pubsub_topic":
"projects/apache-beam-testing/topics/exercise_streaming_metrics_topic_output416629f3-95e9-4d5d-9b11-4599a094ca5c",
"user_name": "dump_to_pub/Write/NativeWrite"
}
}
],
"type": "JOB_TYPE_STREAMING"
}
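For orientation, the three steps above (ReadFromPubSub/Read -> generate_metrics -> dump_to_pub/Write/NativeWrite) describe a streaming pipeline that reads raw bytes from a Pub/Sub subscription, updates user-defined metrics in a DoFn, and republishes the bytes to a Pub/Sub topic. A minimal Python sketch of a pipeline with this shape follows; the DoFn body and the <project>/<input-sub>/<output-topic> placeholders are illustrative assumptions, not the actual test module.

# Hedged sketch (not the actual test module) of a streaming pipeline whose
# Dataflow job graph would match the three steps above:
#   ReadFromPubSub/Read -> generate_metrics -> dump_to_pub/Write/NativeWrite
import apache_beam as beam
from apache_beam.io.gcp.pubsub import ReadFromPubSub, WriteToPubSub
from apache_beam.metrics import Metrics
from apache_beam.options.pipeline_options import PipelineOptions, StandardOptions


class StreamingUserMetricsDoFn(beam.DoFn):
    """Updates user metrics for every element so Dataflow exports them."""

    def __init__(self):
        self.element_counter = Metrics.counter(self.__class__, 'total_values')
        self.element_dist = Metrics.distribution(self.__class__, 'element_dist')

    def process(self, element):
        size = len(element)  # element arrives as raw bytes from Pub/Sub
        self.element_counter.inc()
        self.element_dist.update(size)
        yield element


def run(argv=None):
    options = PipelineOptions(argv)
    options.view_as(StandardOptions).streaming = True
    with beam.Pipeline(options=options) as p:
        _ = (
            p
            | 'ReadFromPubSub' >> ReadFromPubSub(
                subscription='projects/<project>/subscriptions/<input-sub>')
            | 'generate_metrics' >> beam.ParDo(StreamingUserMetricsDoFn())
            | 'dump_to_pub' >> WriteToPubSub(
                topic='projects/<project>/topics/<output-topic>'))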
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: '2020-04-17T15:32:22.605662Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2020-04-17_08_32_20-15826217669199590966'
 location: 'us-central1'
 name: 'beamapp-jenkins-0417153205-864452'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2020-04-17T15:32:22.605662Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2020-04-17_08_32_20-15826217669199590966]
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_32_20-15826217669199590966?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2020-04-17_08_32_20-15826217669199590966 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:25.110Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:25.983Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-c.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.582Z: JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.585Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.595Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.607Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.610Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.614Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.654Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.657Z: JOB_MESSAGE_DETAILED: Fusing consumer generate_metrics into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.660Z: JOB_MESSAGE_DETAILED: Fusing consumer dump_to_pub/Write/NativeWrite into generate_metrics
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.674Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.693Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.745Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.958Z: JOB_MESSAGE_DEBUG: Executing wait step start2
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.974Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:26.980Z: JOB_MESSAGE_BASIC: Starting 1 workers...
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:30.651Z: JOB_MESSAGE_BASIC: Executing operation ReadFromPubSub/Read+generate_metrics+dump_to_pub/Write/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:47.050Z: JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric descriptors and Stackdriver will not create new Dataflow custom metrics for this job. Each unique user-defined metric name (independent of the DoFn in which it is defined) produces a new metric descriptor. To delete old / unused metric descriptors see https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
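The JOB_MESSAGE_WARNING above points at the Monitoring API for cleaning up old metric descriptors. A hedged sketch of that cleanup with the google-cloud-monitoring client follows; it assumes a v2-style client and that Dataflow-created custom metrics use the "custom.googleapis.com/dataflow" type prefix, so verify the listed descriptors before deleting anything.

# Hedged sketch, not part of the build: prune old Dataflow-created custom
# metric descriptors via the Cloud Monitoring API mentioned in the warning.
# Assumes google-cloud-monitoring >= 2.0 and that Dataflow custom metrics
# live under the "custom.googleapis.com/dataflow" type prefix (an assumption).
from google.cloud import monitoring_v3

def prune_dataflow_metric_descriptors(project_id: str, dry_run: bool = True):
    client = monitoring_v3.MetricServiceClient()
    request = {
        "name": f"projects/{project_id}",
        "filter": 'metric.type = starts_with("custom.googleapis.com/dataflow")',
    }
    for descriptor in client.list_metric_descriptors(request=request):
        print("would delete" if dry_run else "deleting", descriptor.type)
        if not dry_run:
            # delete takes the descriptor's full resource name
            client.delete_metric_descriptor(name=descriptor.name)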
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:54.386Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:54.392Z: JOB_MESSAGE_DEBUG: Executing input step topology_init_attach_disk_input_step
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:32:55.349Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-c.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2020-04-17T15:33:15.037Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting for job 2020-04-17_08_32_20-15826217669199590966 after 60 seconds
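The "Timing out on waiting" WARNING is the bounded wait the test harness places on the streaming job; after it fires, the job keeps running and can still be inspected or cancelled. Continuing the pipeline sketch above, the equivalent user-facing calls would look roughly like this (duration is in milliseconds):

# Hedged sketch, continuing the pipeline sketch above with an explicit run()
# instead of the context manager. wait_until_finish() with a duration returns
# after the timeout while the streaming job keeps running.
result = p.run()
result.wait_until_finish(duration=60 * 1000)  # produces the warning above
print(result.state)  # e.g. JOB_STATE_RUNNING after the bounded wait
result.cancel()  # stop the streaming job once verification is done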
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/[email protected]/token HTTP/1.1" 200 192
--------------------- >> end captured logging << ---------------------
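The google.auth and urllib3 DEBUG lines in the captured log show Application Default Credentials being resolved on a GCE worker: google-auth probes the metadata server (169.254.169.254, metadata.google.internal) for the project id and service account, then fetches an access token from the .../token endpoint. A sketch of the explicit equivalent:

# Sketch of what the DEBUG lines above correspond to: resolving Application
# Default Credentials and fetching an access token from the metadata server.
import google.auth
from google.auth.transport.requests import Request

credentials, project_id = google.auth.default()  # probes the metadata server
credentials.refresh(Request())  # GET .../service-accounts/<sa>/token
print(project_id, bool(credentials.token))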
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_17-16472279651733618666?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_54_57-1791996742779524705?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_03_02-15750399557730834331?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_11_05-9963086966186832886?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_19_09-871038572102511585?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_27_18-3245774274628196327?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_12-6554864332020951074?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_00_44-347285211959689516?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_08_52-8865087178173495170?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_17_27-16599898025148321168?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_26_11-2507696048915601410?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_15-5317072520490709403?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_53_02-6994400953490566206?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_00_38-11403989356320526804?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_08_57-7764376301584683165?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_17_49-18313787770280980434?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_25_32-3358355133101943146?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_34_11-8250281007471347870?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_12-242602190498951052?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_01_24-6639353748031731460?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_10_31-16213653247042321684?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_28_02-10284204962100984829?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_14-17288333900657869576?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_49_27-11881685614149728201?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_58_52-2031435483058468280?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_07_36-10314091313247539194?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_16_12-9980690986650267514?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_24_10-1548302799746493812?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_32_20-15826217669199590966?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_11-9691228980707608444?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_49_07-4742571971614902753?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_57_27-2894803809915803973?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_07_06-14720582220221982134?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_15_26-2758617839453231801?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_23_56-7632251526124064180?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_32_16-9876210589654193462?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_12-4215050407470054100?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_50_18-17928809460371508041?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_59_08-814507343858117056?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_07_21-14531287691100267882?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_15_56-15922553506184561024?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_23_31-15861659715336834213?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_30_13-4281831309419696109?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_37_45-17110726964263778905?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_41_12-2596307876262218253?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_07_50_37-3335874171837291239?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_00_52-11323061429823495143?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_09_50-11422429708817136202?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_18_18-9682282869840909677?project=apache-beam-testing
Worker logs: https://console.cloud.google.com/dataflow/jobs/us-central1/2020-04-17_08_26_37-373456699760497347?project=apache-beam-testing
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py37.xml
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 58 tests in 3904.217s
FAILED (SKIP=7, errors=1)
> Task :sdks:python:test-suites:dataflow:py37:postCommitIT FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/test-suites/direct/py37/build.gradle>' line: 60
* What went wrong:
Execution failed for task
':sdks:python:test-suites:direct:py37:hdfsIntegrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 255
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Python37/ws/src/sdks/python/test-suites/dataflow/py37/build.gradle>' line: 89
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py37:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 6m 36s
87 actionable tasks: 68 executed, 19 from cache
Publishing build scan...
https://gradle.com/s/27nmk6nmn5vrw
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]