See
<https://ci-beam.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/930/display/redirect>
Changes:
------------------------------------------
[...truncated 5.81 MB...]
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_4"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_4"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name":
"assert_that/Group/Map(_merge_tagged_vals_under_key).out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s18"
},
"serialized_fn":
"ref_AppliedPTransform_assert_that/Group/Map(_merge_tagged_vals_under_key)_28",
"user_name": "assert_that/Group/Map(_merge_tagged_vals_under_key)"
}
},
{
"kind": "ParallelDo",
"name": "s20",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "<lambda>"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_4"
},
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_4"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_4"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "assert_that/Unkey.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s19"
},
"serialized_fn": "ref_AppliedPTransform_assert_that/Unkey_29",
"user_name": "assert_that/Unkey"
}
},
{
"kind": "ParallelDo",
"name": "s21",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "_equal"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_4"
},
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_4"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_4"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "assert_that/Match.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s20"
},
"serialized_fn": "ref_AppliedPTransform_assert_that/Match_30",
"user_name": "assert_that/Match"
}
}
],
"type": "JOB_TYPE_STREAMING"
}
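The assert_that/Group, assert_that/Unkey, and assert_that/Match steps in the job description above are produced by Beam's pipeline-assertion helper from apache_beam.testing.util. A minimal sketch of the kind of test pipeline that expands into this graph (the data and labels here are illustrative, not the failing test itself):

import apache_beam as beam
from apache_beam.testing.util import assert_that, equal_to

with beam.Pipeline() as p:
    pcoll = p | 'Create' >> beam.Create([1, 2, 3])
    # assert_that expands into the Group/Map(_merge_tagged_vals_under_key),
    # Unkey, and Match steps visible in the job description above.
    assert_that(pcoll, equal_to([1, 2, 3]))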
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: u'2020-08-16T18:31:21.916631Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2020-08-16_11_31_20-6234680426288134815'
location: u'us-central1'
name: u'beamapp-jenkins-0816183112-652076'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2020-08-16T18:31:21.916631Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2020-08-16_11_31_20-6234680426288134815]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job: 2020-08-16_11_31_20-6234680426288134815
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_20-6234680426288134815?project=apache-beam-testing
apache_beam.runners.dataflow.test_dataflow_runner: WARNING: Waiting indefinitely for streaming job.
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-validatesRunnerStreamingTests-df-py27.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 27 tests in 1985.890s
FAILED (errors=1, failures=1)
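The "Waiting indefinitely for streaming job" warning in the captured logging comes from the test runner blocking on the submitted streaming job until it reaches a terminal state. A rough sketch, using the standard pipeline-options API, of how a streaming test pipeline like this is submitted and waited on (the option values are placeholders modelled on the log, not the failing test's actual configuration):

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    '--runner=TestDataflowRunner',              # runner named in the warning above
    '--project=apache-beam-testing',            # mirrored from the log; placeholder here
    '--region=us-central1',
    '--streaming',                              # matches "type": "JOB_TYPE_STREAMING"
    '--temp_location=gs://example-bucket/tmp',  # hypothetical bucket
])

p = beam.Pipeline(options=options)
_ = p | beam.Create([1]) | beam.Map(lambda x: x)
result = p.run()              # submits the job to Dataflow
result.wait_until_finish()    # blocks until the job reaches a terminal state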
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_21-9037222113271597068?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_39_46-716701577424680874?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_48_05-9019085114146527218?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_56_34-5049021131738558573?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_22-4708241921666799835?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_39_47-2708023825063462764?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_20-3099620918365585810?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_38_37-10450551902355259850?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_47_03-6385651421698123016?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_24-14969015524090397816?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_38_54-5863247188267769453?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_20-8928016318266735407?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_38_41-14859237913906434094?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_46_06-10816916624320926674?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_22-2720546799223504018?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_39_52-11126057457280570771?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_47_18-431317261670945915?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_22-10179356360221705459?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_39_58-785155043040720512?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_20-6234680426288134815?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_31_47-8604632855090559480?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_40_27-18373567747479774359?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_11_47_48-10985968567899238832?project=apache-beam-testing
> Task :sdks:python:test-suites:dataflow:py2:validatesRunnerStreamingTests FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 144
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:validatesRunnerBatchTests'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Py_VR_Dataflow_V2/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 173
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:validatesRunnerStreamingTests'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 1h 3m 25s
65 actionable tasks: 47 executed, 18 from cache
Publishing build scan...
https://gradle.com/s/vfhsctoit4s34
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure