See
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/62/display/redirect?page=changes>
Changes:
[valentyn] Add Precommit it tests for Python 3.
[jozsi] Make metrics flush periodically
[jozsi] Rename pipeline option for parallelism
[jozsi] Make improvements in documentation
[jozsi] Add support for running "NeedsRunner" category of tests
[jozsi] Make cooperative ParDo execution an option, when possible
[jozsi] Remove previous edge optimization, it's not playing well with file-write
[jozsi] Make ParDoLifecycleTest less restrictive
[jozsi] Make IMPULSE work in Jet Runner
[aromanenko.dev] [BEAM-7357] KinesisIO: fix too many checks that writing stream
exists.
[mxm] [BEAM-7442][BEAM-5650] Read sequentially from bounded sources in
[rtnguyen] Transform catalog for Python
------------------------------------------
[...truncated 323.19 KB...]
}
},
"output_name": "out",
"user_name":
"write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(PreFinalize.out.0).output"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s17"
},
"user_name":
"write/Write/WriteImpl/FinalizeWrite/_UnpickledSideInput(PreFinalize.out.0)",
"windowing_strategy":
"%0AB%22%40%0A%1Dref_Coder_GlobalWindowCoder_1%12%1F%0A%1D%0A%1Bbeam%3Acoder%3Aglobal_window%3Av1jT%0A%25%0A%23%0A%21beam%3Awindowfn%3Aglobal_windows%3Av0.1%10%01%1A%1Dref_Coder_GlobalWindowCoder_1%22%02%3A%00%28%010%018%01H%01"
}
},
{
"kind": "ParallelDo",
"name": "s21",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "_finalize_write"
}
],
"non_parallel_inputs": {
"side0-write/Write/WriteImpl/FinalizeWrite": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s18"
},
"side1-write/Write/WriteImpl/FinalizeWrite": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s19"
},
"side2-write/Write/WriteImpl/FinalizeWrite": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "SideInput-s20"
}
},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYE5OLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqYIfgYGhvi0xJycpMTk7HiwlkJ8pgVkJmfnpEJNYQGawlpbyJZUnKQHACYlLgM=",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "write/Write/WriteImpl/FinalizeWrite.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s7"
},
"serialized_fn":
"eNrNV/l/G8UV10rOwZKmAZrQQA+RklamSBsbkoBLE0BJwIgo7trgbUu6jHZHmo13d/bNzFoxtbhSOab0hrZQaJtQet/3ffwF/Zv6ZiQ5Fdg0v6Wfj727ejPvzbzv+77v7D5bqgQkIwGjfouSpKYESWWbi0TWAi6oXSdxTFoxXRQky6g4wU+lNhQmnwOrB8WKVyoUCn47hdJYkIjjX4tIavvtKCVx9BT1uyJS1IYJbwe6ZIIHVErYxia863UItZJRn0WpkrB9fD84YOy1kOKGiOJC2rNnFtD8sDbbsAM3s7PZg+sqJlSUZrky8STYTW8Xmniurtiub+ZrsKuVt87Cuypv3bWi51XEbX8B7/NRumTDboz+7h7sqXh7MFI7iqmfEcX8TNB2dB5uGIvAM/ROZW2ZxDnOE3w5Cqmw5xVRUfC4Ns6NbHAjBr6pB++peDYGHnjoHcLeIIziuObrq+3HnITGbsM+g7ZUAm7uw3td2O9tG3nCLd4ZfO7IGcdRNMmqEnEiHVrFSlZpGlYVH9yoVNLJVqqRqgYxz0NngI0zdfjwvXdNH7l7+ujU1FFHUJnHWIpb89aVtFOSUF/mbZ32+5je/vt78IEK28v2s1s8nFWAD+J8nU2aJ75kRIQSyo2C2WbAMWu4bQwuY5PDm73A55WI0k6dG3wO4AIf6sHtLe8GnbWONtiCJidRcNC7He3Vg5XhUJ5MHjocVnkbTVfW1zb4sLdvPEQn5q1RnI+Yweodm3oOqh7wBMstJdZ2UKJJbwLNJFcc7vCuw8ckSobV+6gBQPPIyWISpXCnt18TXHdPipUUJIoxST+lXbwjH6vrpiN8RolGqNbMW+OsTBUV2EO1XEWx/YDo5AlN1VxMAsp4bKByEKpDTF+nGpa5T9Me3HUW7q54kzpzZNyhqmlAZ/HKdTbJYufUsD+NBQ6PLZ0Zbtn+Y2kWBUsxDecx0KzuLxuO9OBoxcAQEkXgns0cN6afwCk23Isbm+nBxyrebo1eoBVAN5POD+7zbkWj9p7R2/VNG8/gnoRWn5nlKfi4IUI3SkPe9RPEU8OI0nNsK/3ScUwYaZuKk9h/m7cNxw38yxHt6mD3jzVfIChRyLg8DXRj2/BAhe3zduJ8TVKtQfBgH+ounKg0rEYB/0uNm+r2emG1sG5dKM4X4GSzD6cmjcsoF3ioDw97Ai0O4wl1ztF0KUrl6F6VMVmmTpeLJYl5UUen5c9RYfiaBnRB97C/yEVY53mqZhf8uZXpo44UgSPDJd3divHU+S9QnEE9atkKzJqt3BeTpBWSY/DI6QetegEa3l7d5YInvsCQmswbu33USKjBZyhvcHoNmpMKzrgwNwZXhyofy4mM/IRZppVHscKUwDUY47Aehfk1WHDhsTHXKMm4UH7CwzxGqXvcu1FT5G2UgsU+eC580oT30TdQvg+fWoNPu/AEe6S5WfUCij/gLMPaYZmKWKZSY0ejXn9SFZR1qbhaCK3LxQtWr7halAdWrUulsKgmLhQuY/nUtlUcCUtLRXFktRhO7C+gbXu4bWBXOwa24VNJP7VL+/D6vBXiTPhMpdkomtRD2iaoqOBrmnhPo2WuzuOYGlqVebsssQXLB8NyN1KsnODBW1aM4EhKyzSmuuPLg4ahYZnIMkGHtBNThd66NLXyqUhIVVZdPpovyzQNNEGo0D4Y8cBBeeBOc63BkwrIoH3jSCpoGRXTNVecxxICb7v+LeMooBCacwerB9R07skkUysbvQ1tMxzTFDpG+8w5d1IILoCxmxVEXtHEhnMGihGJlszqWvUgvggJmzU67G9OwvT0m1Z9T6G4x5qwdlu7rJ1W0SoWgU8iDTMXgD3B0ia+AwgF0gXVh9yFZXa2B90tROM8Oy77sOLCU2vw2R6sKui58HTeYsfY8fwiPLOhm1NXpZvPMtTD5yrsHqZF7vkeXKiw/61on9OrsfuHzBwIiNWYr0+savb0UTrWJhmqxcVrpRbr42rxwul/W6yhMf+8Cy8
i5i9ozL+g4IsufGkDuy9vYDd9Vdh9RWP31RF2X+vBS1eD3cubYPdW8f06IvgNg+A3rxWCr4wj+CrqLWuwRxlK6LcQx9dceB1xfLXJroU+fVvrE/u/0aTvKPhuhbVYwEJGWZt1mNGPS+wcW2KoEZfZK++kEW9srhHf03x904XvI85vaL7+QMEPXfhRH37swk+0Rvx0C434mdGIn7vwizX4ZQ9+peDXLvxmg+e/pe/44bRo4mEoG36HvP59D/5QMWQYLITx/7iV/2CG/ZB5ZxnEwe+cP2GUPxudxtfkTgcJmsJftgoxnGKfGBw8C8Of8FcM8jeTL5YuT/KY6Grrc5fC3xuWOXY1rFKRJPPxxbeFL6kC/oFD5jstkv7oLPvnet5S8K/afwCUWctL",
"user_name": "write/Write/WriteImpl/FinalizeWrite/FinalizeWrite"
}
}
],
"type": "JOB_TYPE_BATCH"
}
root: INFO: Create job: <Job
createTime: '2019-05-31T18:13:51.510065Z'
currentStateTime: '1970-01-01T00:00:00Z'
id: '2019-05-31_11_13_50-1961887211826605435'
location: 'us-central1'
name: 'beamapp-jenkins-0531181348-226633'
projectId: 'apache-beam-testing'
stageStates: []
startTime: '2019-05-31T18:13:51.510065Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-05-31_11_13_50-1961887211826605435]
root: INFO: To access the Dataflow monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-05-31_11_13_50-1961887211826605435?project=apache-beam-testing
root: INFO: Job 2019-05-31_11_13_50-1961887211826605435 is in state
JOB_STATE_RUNNING
root: INFO: 2019-05-31T18:13:54.108Z: JOB_MESSAGE_DETAILED: Checking
permissions granted to controller Service Account.
root: INFO: 2019-05-31T18:13:54.520Z: JOB_MESSAGE_BASIC: Worker configuration:
n1-standard-1 in us-central1-c.
root: INFO: 2019-05-31T18:13:55.318Z: JOB_MESSAGE_DETAILED: Expanding
CoGroupByKey operations into optimizable parts.
root: INFO: 2019-05-31T18:13:55.357Z: JOB_MESSAGE_DEBUG: Combiner lifting
skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey not followed by a
combiner.
root: INFO: 2019-05-31T18:13:55.406Z: JOB_MESSAGE_DEBUG: Combiner lifting
skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2019-05-31T18:13:55.458Z: JOB_MESSAGE_DETAILED: Expanding
GroupByKey operations into optimizable parts.
root: INFO: 2019-05-31T18:13:55.507Z: JOB_MESSAGE_DETAILED: Lifting
ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-05-31T18:13:55.682Z: JOB_MESSAGE_DEBUG: Annotating graph with
Autotuner information.
root: INFO: 2019-05-31T18:13:55.763Z: JOB_MESSAGE_DETAILED: Fusing adjacent
ParDo, Read, Write, and Flatten operations
root: INFO: 2019-05-31T18:13:55.821Z: JOB_MESSAGE_DETAILED: Fusing consumer
group/Write into group/Reify
root: INFO: 2019-05-31T18:13:55.883Z: JOB_MESSAGE_DETAILED: Fusing consumer
group/GroupByWindow into group/Read
root: INFO: 2019-05-31T18:13:55.927Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/GroupByKey/GroupByWindow into
write/Write/WriteImpl/GroupByKey/Read
root: INFO: 2019-05-31T18:13:55.974Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/GroupByKey/Write into
write/Write/WriteImpl/GroupByKey/Reify
root: INFO: 2019-05-31T18:13:56.024Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/Extract into
write/Write/WriteImpl/GroupByKey/GroupByWindow
root: INFO: 2019-05-31T18:13:56.082Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/WindowInto(WindowIntoFn) into write/Write/WriteImpl/Pair
root: INFO: 2019-05-31T18:13:56.138Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/GroupByKey/Reify into
write/Write/WriteImpl/WindowInto(WindowIntoFn)
root: INFO: 2019-05-31T18:13:56.186Z: JOB_MESSAGE_DETAILED: Fusing consumer
split into read/Read
root: INFO: 2019-05-31T18:13:56.241Z: JOB_MESSAGE_DETAILED: Fusing consumer
group/Reify into pair_with_one
root: INFO: 2019-05-31T18:13:56.281Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/Pair into write/Write/WriteImpl/WriteBundles/WriteBundles
root: INFO: 2019-05-31T18:13:56.332Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/WriteBundles/WriteBundles into format
root: INFO: 2019-05-31T18:13:56.383Z: JOB_MESSAGE_DETAILED: Fusing consumer
pair_with_one into split
root: INFO: 2019-05-31T18:13:56.454Z: JOB_MESSAGE_DETAILED: Fusing consumer
count into group/GroupByWindow
root: INFO: 2019-05-31T18:13:56.518Z: JOB_MESSAGE_DETAILED: Fusing consumer
format into count
root: INFO: 2019-05-31T18:13:56.569Z: JOB_MESSAGE_DETAILED: Fusing consumer
write/Write/WriteImpl/InitializeWrite into write/Write/WriteImpl/DoOnce/Read
root: INFO: 2019-05-31T18:13:56.622Z: JOB_MESSAGE_DEBUG: Workflow config is
missing a default resource spec.
root: INFO: 2019-05-31T18:13:56.663Z: JOB_MESSAGE_DEBUG: Adding StepResource
setup and teardown to workflow graph.
root: INFO: 2019-05-31T18:13:56.714Z: JOB_MESSAGE_DEBUG: Adding workflow start
and stop steps.
root: INFO: 2019-05-31T18:13:56.766Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-05-31T18:13:57.005Z: JOB_MESSAGE_DEBUG: Executing wait step
start26
root: INFO: 2019-05-31T18:13:57.194Z: JOB_MESSAGE_BASIC: Executing operation
write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
root: INFO: 2019-05-31T18:13:57.279Z: JOB_MESSAGE_BASIC: Executing operation
write/Write/WriteImpl/GroupByKey/Create
root: INFO: 2019-05-31T18:13:57.291Z: JOB_MESSAGE_DEBUG: Starting worker pool
setup.
root: INFO: 2019-05-31T18:13:57.325Z: JOB_MESSAGE_BASIC: Executing operation
group/Create
root: INFO: 2019-05-31T18:13:57.350Z: JOB_MESSAGE_BASIC: Starting 10 workers in
us-central1-c...
root: INFO: 2019-05-31T18:13:57.571Z: JOB_MESSAGE_DEBUG: Value
"write/Write/WriteImpl/GroupByKey/Session" materialized.
root: INFO: 2019-05-31T18:13:57.635Z: JOB_MESSAGE_DEBUG: Value "group/Session"
materialized.
root: INFO: 2019-05-31T18:13:57.731Z: JOB_MESSAGE_BASIC: Executing operation
read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2019-05-31T18:14:24.006Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised
the number of workers to 1 based on the rate of progress in the currently
running step(s).
root: INFO: 2019-05-31T18:14:24.084Z: JOB_MESSAGE_DETAILED: Resized worker pool
to 1, though goal was 10. This could be a quota issue.
root: INFO: 2019-05-31T18:14:29.505Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised
the number of workers to 3 based on the rate of progress in the currently
running step(s).
root: INFO: 2019-05-31T18:14:29.552Z: JOB_MESSAGE_DETAILED: Resized worker pool
to 3, though goal was 10. This could be a quota issue.
root: INFO: 2019-05-31T18:14:45.700Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised
the number of workers to 4 based on the rate of progress in the currently
running step(s).
root: INFO: 2019-05-31T18:14:45.750Z: JOB_MESSAGE_DETAILED: Resized worker pool
to 4, though goal was 10. This could be a quota issue.
root: INFO: 2019-05-31T18:14:51.223Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised
the number of workers to 10 based on the rate of progress in the currently
running step(s).
root: INFO: 2019-05-31T18:15:46.260Z: JOB_MESSAGE_DETAILED: Workers have
started successfully.
root: INFO: 2019-05-31T18:15:46.310Z: JOB_MESSAGE_DETAILED: Workers have
started successfully.
apache_beam.io.filesystem: DEBUG: Listing files in
'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1559326427117/results'
apache_beam.io.filesystem: DEBUG: translate_pattern:
'gs://temp-storage-for-end-to-end-tests/py-it-cloud/output/1559326427117/results*'
->
'gs://temp\\-storage\\-for\\-end\\-to\\-end\\-tests/py\\-it\\-cloud/output/1559326427117/results[^/\\\\]*'
root: DEBUG: Connecting using Google Application Default Credentials.
root: INFO: Starting the size estimation of the input
oauth2client.transport: INFO: Attempting refresh to obtain initial access_token
root: INFO: Finished listing 0 files in 0.06306123733520508 seconds.
root: DEBUG: Connecting using Google Application Default Credentials.
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML:
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 498.549s
FAILED (failures=1)
ok
----------------------------------------------------------------------
XML:
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 913.412s
OK
ok
----------------------------------------------------------------------
XML:
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 921.674s
OK
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:python:integrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to
get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task
':sdks:python:test-suites:dataflow:py37:integrationTest'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to
get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
BUILD FAILED in 16m 22s
2019-05-31 18:29:15,517 4d9b08dd MainThread beam_integration_benchmark(1/1)
ERROR Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>",
line 760, in RunBenchmark
DoRunPhase(spec, collector, detailed_timer)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>",
line 609, in DoRunPhase
samples = spec.BenchmarkRun(spec)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>",
line 160, in Run
job_type=job_type)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>",
line 90, in SubmitJob
assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2019-05-31 18:29:15,519 4d9b08dd MainThread beam_integration_benchmark(1/1)
INFO Cleaning up benchmark beam_integration_benchmark
2019-05-31 18:29:15,521 4d9b08dd MainThread beam_integration_benchmark(1/1)
ERROR Exception running benchmark
Traceback (most recent call last):
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>",
line 903, in RunBenchmarkTask
RunBenchmark(spec, collector)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>",
line 760, in RunBenchmark
DoRunPhase(spec, collector, detailed_timer)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>",
line 609, in DoRunPhase
samples = spec.BenchmarkRun(spec)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>",
line 160, in Run
job_type=job_type)
File
"<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>",
line 90, in SubmitJob
assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2019-05-31 18:29:15,521 4d9b08dd MainThread beam_integration_benchmark(1/1)
ERROR Benchmark 1/1 beam_integration_benchmark (UID:
beam_integration_benchmark0) failed. Execution will continue.
2019-05-31 18:29:15,522 4d9b08dd MainThread beam_integration_benchmark(1/1)
INFO Benchmark run statuses:
---------------------------------------------------------------------------------
Name UID Status Failed
Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark beam_integration_benchmark0 FAILED
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2019-05-31 18:29:15,522 4d9b08dd MainThread beam_integration_benchmark(1/1)
INFO Complete logs can be found at:
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/runs/4d9b08dd/pkb.log>
2019-05-31 18:29:15,522 4d9b08dd MainThread beam_integration_benchmark(1/1)
INFO Completion statuses can be found at:
<https://builds.apache.org/job/beam_PerformanceTests_WordCountIT_Py27/ws/runs/4d9b08dd/completion_statuses.json>
Build step 'Execute shell' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]