See <https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Dataflow_Batch/7/display/redirect>
------------------------------------------
[...truncated 166.32 KB...]
"is_wrapper": true
},
"output_name": "out",
"user_name": "Read synthetic.out"
}
],
"user_name": "Read synthetic"
}
},
{
"kind": "ParallelDo",
"name": "s2",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "MeasureTime",
"type": "STRING",
"value":
"apache_beam.testing.load_tests.load_test_metrics_utils.MeasureTime"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "Measure time: Start.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s1"
},
"serialized_fn": "<string of 588 bytes>",
"user_name": "Measure time: Start"
}
},
{
"kind": "ParallelDo",
"name": "s3",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CounterOperation",
"type": "STRING",
"value":
"apache_beam.testing.load_tests.pardo_test.CounterOperation"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "Step: 0.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s2"
},
"serialized_fn": "<string of 1384 bytes>",
"user_name": "Step: 0"
}
},
{
"kind": "ParallelDo",
"name": "s4",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "MeasureTime",
"type": "STRING",
"value":
"apache_beam.testing.load_tests.load_test_metrics_utils.MeasureTime"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": []
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "out",
"user_name": "Measure time: End.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "out",
"step_name": "s3"
},
"serialized_fn": "<string of 504 bytes>",
"user_name": "Measure time: End"
}
}
],
"type": "JOB_TYPE_BATCH"
}
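For reference, the job graph above is a four-step linear pipeline: Read synthetic -> Measure time: Start -> Step: 0 -> Measure time: End, where the measuring steps use load_test_metrics_utils.MeasureTime and the middle step uses pardo_test.CounterOperation. The sketch below only illustrates that shape; it is not the pardo_test.py source, the SyntheticSource settings are invented placeholders, the MeasureTime wrappers are omitted (their constructor arguments are not visible in this log), and CounterDoFn merely stands in for CounterOperation.

    import apache_beam as beam
    from apache_beam.io import Read
    from apache_beam.metrics import Metrics
    from apache_beam.testing.synthetic_pipeline import SyntheticSource

    class CounterDoFn(beam.DoFn):
        """Stand-in for pardo_test.CounterOperation: bump a counter, pass the element on."""
        def __init__(self):
            # Placeholder metric namespace/name, not the ones reported by the real load test.
            self.counter = Metrics.counter('pardo', 'counter_operation')

        def process(self, element):
            self.counter.inc()
            yield element

    with beam.Pipeline() as pipeline:
        (pipeline
         | 'Read synthetic' >> Read(SyntheticSource({
             'numRecords': 1000,     # placeholder values; the job's real
             'keySizeBytes': 10,     # input options are not shown in this
             'valueSizeBytes': 90})) # truncated log
         | 'Step: 0' >> beam.ParDo(CounterDoFn()))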
root: INFO: Create job: <Job
createTime: u'2019-07-28T14:06:56.599256Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2019-07-28_07_06_47-14935165707617073420'
location: u'us-central1'
name: u'load-tests-python-dataflow-batch-pardo-4-0728130005'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2019-07-28T14:06:56.599256Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2019-07-28_07_06_47-14935165707617073420]
root: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobsDetail/locations/us-central1/jobs/2019-07-28_07_06_47-14935165707617073420?project=apache-beam-testing
root: INFO: Job 2019-07-28_07_06_47-14935165707617073420 is in state JOB_STATE_PENDING
root: INFO: 2019-07-28T14:06:58.398Z: JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service Account.
root: INFO: 2019-07-28T14:06:58.827Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
root: INFO: 2019-07-28T14:06:59.514Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
root: INFO: 2019-07-28T14:06:59.562Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
root: INFO: 2019-07-28T14:06:59.594Z: JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2019-07-28T14:06:59.629Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
root: INFO: 2019-07-28T14:06:59.682Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2019-07-28T14:06:59.712Z: JOB_MESSAGE_DETAILED: Fusing consumer Measure time: Start into Read synthetic
root: INFO: 2019-07-28T14:06:59.744Z: JOB_MESSAGE_DETAILED: Fusing consumer Measure time: End into Step: 0
root: INFO: 2019-07-28T14:06:59.781Z: JOB_MESSAGE_DETAILED: Fusing consumer Step: 0 into Measure time: Start
root: INFO: 2019-07-28T14:06:59.826Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
root: INFO: 2019-07-28T14:06:59.875Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
root: INFO: 2019-07-28T14:06:59.908Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
root: INFO: 2019-07-28T14:06:59.945Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
root: INFO: 2019-07-28T14:07:00.106Z: JOB_MESSAGE_DEBUG: Executing wait step start3
root: INFO: 2019-07-28T14:07:00.188Z: JOB_MESSAGE_BASIC: Executing operation Read synthetic+Measure time: Start+Step: 0+Measure time: End
root: INFO: 2019-07-28T14:07:00.244Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
root: INFO: 2019-07-28T14:07:00.271Z: JOB_MESSAGE_BASIC: Starting 5 workers in us-central1-a...
root: INFO: 2019-07-28T14:07:00.406Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
root: INFO: Job 2019-07-28_07_06_47-14935165707617073420 is in state JOB_STATE_RUNNING
root: INFO: 2019-07-28T14:07:51.986Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 4 based on the rate of progress in the currently running step(s).
root: INFO: 2019-07-28T14:07:52.020Z: JOB_MESSAGE_DETAILED: Resized worker pool to 4, though goal was 5. This could be a quota issue.
root: INFO: 2019-07-28T14:08:13.910Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 5 based on the rate of progress in the currently running step(s).
root: INFO: 2019-07-28T14:08:29.630Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: INFO: 2019-07-28T14:08:29.668Z: JOB_MESSAGE_DETAILED: Workers have started successfully.
root: INFO: 2019-07-28T14:10:00.412Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-a.
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: <https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Dataflow_Batch/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 1 test in 330.639s
FAILED (errors=1)
> Task :sdks:python:apache_beam:testing:load_tests:run FAILED
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_LoadTests_Python_ParDo_Dataflow_Batch/ws/src/sdks/python/apache_beam/testing/load_tests/build.gradle>' line: 49
* What went wrong:
Execution failed for task ':sdks:python:apache_beam:testing:load_tests:run'.
> error occurred
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
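Because the build script reports the underlying exception only as "> error occurred", the quickest way to surface the actual Python failure is usually to re-run the failing task with the options Gradle suggests above, for example ./gradlew :sdks:python:apache_beam:testing:load_tests:run --stacktrace --info, together with whatever load-test properties the Jenkins job configures (those are not visible in this truncated excerpt).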
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 5m 42s
4 actionable tasks: 3 executed, 1 up-to-date
Publishing build scan...
https://gradle.com/s/umac6dsivhagk
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]