See
<https://ci-beam.apache.org/job/beam_PostCommit_Python2/2798/display/redirect>
Changes:
------------------------------------------
[...truncated 17.80 MB...]
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s39"
},
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/_UnpickledSideInput(TriggerLoadJobsWithoutTempTables.out.0)",
"windowing_strategy":
"%0A%88%11%22%40%0A%1Dref_Coder_GlobalWindowCoder_1%12%1F%0A%1D%0A%1Bbeam%3Acoder%3Aglobal_window%3Av1%2A%C3%10%0A%25ref_Environment_default_environment_1%12%99%10%12%12beam%3Aenv%3Adocker%3Av1%1A%3B%0A9gcr.io/cloud-dataflow/v1beta3/python%3Abeam-master-20200630%2A%14beam%3Acoder%3Avarint%3Av1%2A%13beam%3Acoder%3Abytes%3Av1%2A%13beam%3Acoder%3Atimer%3Av1%2A%1Bbeam%3Acoder%3Aglobal_window%3Av1%2A%1Dbeam%3Acoder%3Ainterval_window%3Av1%2A%16beam%3Acoder%3Aiterable%3Av1%2A%23beam%3Acoder%3Astate_backed_iterable%3Av1%2A%1Cbeam%3Acoder%3Awindowed_value%3Av1%2A%22beam%3Acoder%3Aparam_windowed_value%3Av1%2A%14beam%3Acoder%3Adouble%3Av1%2A%19beam%3Acoder%3Astring_utf8%3Av1%2A%1Bbeam%3Acoder%3Alength_prefix%3Av1%2A%12beam%3Acoder%3Abool%3Av1%2A%10beam%3Acoder%3Akv%3Av1%2A%11beam%3Acoder%3Arow%3Av1%2A%23beam%3Aprotocol%3Aprogress_reporting%3Av0%2A%1Ebeam%3Aprotocol%3Aworker_status%3Av1%2A%3Abeam%3Aversion%3Asdk_base%3Aapache/beam_python2.7_sdk%3A2.25.0.dev2q%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%12%1D%0A%1Bpostcommit_requirements.txt%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%12%0A%10requirements.txt2%9B%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%12%3D%0A%3B/tmp/dataflow-requirements-cache/parameterized-0.7.4.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%1C%0A%1Aparameterized-0.7.4.tar.gz2%89%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%124%0A2/tmp/dataflow-requirements-cache/mock-2.0.0.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%13%0A%11mock-2.0.0.tar.gz2%89%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%124%0A2/tmp/dataflow-requirements-cache/six-1.15.0.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%13%0A%11six-1.15.0.tar.gz2%91%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%128%0A6/tmp/dataflow-requirements-cache/funcsigs-1.0.2.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%17%0A%15funcsigs-1.0.2.tar.gz2%87%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%123%0A1/tmp/dataflow-requirements-cache/pbr-5.4.5.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%12%0A%10pbr-5.4.5.tar.gz2%97%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%12%3B%0A9/tmp/dataflow-requirements-cache/PyHamcrest-1.10.1.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%1A%0A%18PyHamcrest-1.10.1.tar.gz2%C3%01%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%12h%0Af<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/build/apache-beam.tar.gz%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%19%0A%17dataflow_python_sdk.tar2%9B%02%0A%1Abeam%3Aartifact%3Atype%3Afile%3Av1%12%C3%01%0A%C0%01/home/jenkins/jenkins-slave/workspace/beam_PostCommit_Python2/src/runners/google-cloud-dataflow-java/worker/build/libs/beam-runners-google-cloud-dataflow-java-fn-api-worker-2.25.0-SNAPSHOT.jar%1A%20beam%3Aartifact%3Arole%3Astaging_to%3Av1%22%15%0A%13dataflow-worker.jarjx%0A%22%0A%20beam%3Awindow_fn%3Aglobal_windows%3Av1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01Z%25ref_Environment_default_environment_1">
}
},
{
"kind": "ParallelDo",
"name": "s41",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "WaitForBQJobs",
"type": "STRING",
"value": "apache_beam.io.gcp.bigquery_file_loads.WaitForBQJobs"
}
],
"non_parallel_inputs": {
"python_side_input0-write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s40"
}
},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s7"
},
"serialized_fn": "<string of 1276 bytes>",
"user_name":
"write/BigQueryBatchFileLoads/WaitForDestinationLoadJobs/WaitForDestinationLoadJobs"
}
},
{
"kind": "Flatten",
"name": "s42",
"properties": {
"display_data": [],
"inputs": [
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s39"
},
{
"@type": "OutputReference",
"output_name": "None",
"step_name": "s25"
}
],
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "FastPrimitivesCoder$<string of 176 bytes>",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "write/BigQueryBatchFileLoads/Flatten.out"
}
],
"user_name": "write/BigQueryBatchFileLoads/Flatten"
}
}
],
"type": "JOB_TYPE_BATCH"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
createTime: u'2020-08-16T12:18:06.326433Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2020-08-16_05_18_05-10916297527687110992'
location: u'us-central1'
name: u'beamapp-jenkins-0816121753-644402'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2020-08-16T12:18:06.326433Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id:
[2020-08-16_05_18_05-10916297527687110992]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job:
2020-08-16_05_18_05-10916297527687110992
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_18_05-10916297527687110992?project=apache-beam-testing
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py27.xml
----------------------------------------------------------------------
XML:
<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 66 tests in 3249.220s
FAILED (SKIP=6, errors=8, failures=4)
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_39-9218876129215103894?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_21_12-8029351426910307159?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_28_27-1665882482193038621?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_35_18-14304807627450514483?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_42_34-16410263802143632348?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_49_18-8832723089585497443?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_40-12012572222289591355?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_10_38-12342843378136529744?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_18_05-10916297527687110992?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_18_38-3289861097010652722?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_26_19-5832889648988019348?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_33_32-4963569884324567675?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_41_29-16144869670617007720?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_42-2942780605956485519?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_23_19-14643965292216605511?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_30_33-3332831267259962702?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_37_40-9776751262105000999?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_44_52-12429222479816435501?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_42-8342166706869583704?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_10_43-9261967695317338002?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_19_40-6162451191609178409?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_20_07-13753677316923764503?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_27_47-14708011283341823791?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_34_47-1695906379706791939?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_41_48-4411877310980681563?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_40-5085573782108968929?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_11_27-14997582207829372520?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_11_52-13266974768027932133?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_19_20-5856368794062578572?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_26_33-7599825171907716259?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_34_06-15986933436057982117?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_42_08-1235508778002673389?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_40-14939267841377380614?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_10_26-6334925929230578852?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_18_02-11177104190764039521?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_18_33-13745380250886442132?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_25_42-2786060916306486169?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_33_39-15699921093330375376?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_41_30-3373593283077345197?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_41-1571466841519563600?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_09_53-7947255838696562068?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_16_09-6355936833040813841?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_23_52-18100248828205386833?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_31_02-10556583003390372906?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_38_04-17149914748661975440?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_02_41-12865187233357824749?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_10_18-6318113887164545279?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_16_54-18294911767913967976?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_23_46-6898465494138820670?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_31_40-9854282489754816895?project=apache-beam-testing
Worker logs:
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-08-16_05_39_23-16046509125500782960?project=apache-beam-testing
> Task :sdks:python:test-suites:dataflow:py2:postCommitIT FAILED
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* Where:
Build file
'<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/direct/py2/build.gradle>'
line: 50
* What went wrong:
Execution failed for task ':sdks:python:test-suites:direct:py2:directRunnerIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Script
'<https://ci-beam.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/dataflow/common.gradle>'
line: 116
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py2:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 55m 56s
159 actionable tasks: 122 executed, 35 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/agoo2fdr77gfo
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure