See
<https://builds.apache.org/job/beam_PostCommit_Python2/1838/display/redirect>
Changes:
------------------------------------------
[...truncated 10.06 MB...]
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "assert_that/Unkey.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s21"
},
"serialized_fn": "<string of 1056 bytes>",
"user_name": "assert_that/Unkey"
}
},
{
"kind": "ParallelDo",
"name": "s23",
"properties": {
"display_data": [
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
"type": "STRING",
"value": "_equal"
},
{
"key": "fn",
"label": "Transform Function",
"namespace": "apache_beam.transforms.core.ParDo",
"shortValue": "CallableWrapperDoFn",
"type": "STRING",
"value": "apache_beam.transforms.core.CallableWrapperDoFn"
}
],
"non_parallel_inputs": {},
"output_info": [
{
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
},
{
"@type":
"FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
"component_encodings": [],
"pipeline_proto_coder_id":
"ref_Coder_FastPrimitivesCoder_5"
}
],
"is_pair_like": true,
"pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_5"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
},
"output_name": "None",
"user_name": "assert_that/Match.out"
}
],
"parallel_input": {
"@type": "OutputReference",
"output_name": "None",
"step_name": "s22"
},
"serialized_fn": "<string of 1736 bytes>",
"user_name": "assert_that/Match"
}
}
],
"type": "JOB_TYPE_BATCH"
}
INFO:apache_beam.runners.dataflow.internal.apiclient:Create job: <Job
createTime: u'2020-02-28T13:27:24.061511Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2020-02-28_05_27_22-3449477874967220235'
location: u'us-central1'
name: u'beamapp-jenkins-0228130040-865094'
projectId: u'apache-beam-testing'
stageStates: []
startTime: u'2020-02-28T13:27:24.061511Z'
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
INFO:apache_beam.runners.dataflow.internal.apiclient:Created job with id:
[2020-02-28_05_27_22-3449477874967220235]
INFO:apache_beam.runners.dataflow.internal.apiclient:To access the Dataflow
monitoring console, please navigate to
https://console.cloud.google.com/dataflow/jobs/us-central1/2020-02-28_05_27_22-3449477874967220235?project=apache-beam-testing
INFO:apache_beam.runners.dataflow.dataflow_runner:Job
2020-02-28_05_27_22-3449477874967220235 is in state JOB_STATE_RUNNING
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:22.095Z:
JOB_MESSAGE_DETAILED: Autoscaling is enabled for job
2020-02-28_05_27_22-3449477874967220235. The number of workers will be between
1 and 1000.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:22.096Z:
JOB_MESSAGE_DETAILED: Autoscaling was automatically enabled for job
2020-02-28_05_27_22-3449477874967220235.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:28.805Z:
JOB_MESSAGE_DETAILED: Checking permissions granted to controller Service
Account.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:29.797Z:
JOB_MESSAGE_BASIC: Worker configuration: n1-standard-1 in us-central1-c.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.431Z:
JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.472Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step
assert_that/Group/GroupByKey: GroupByKey not followed by a combiner.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.543Z:
JOB_MESSAGE_DEBUG: Combiner lifting skipped for step read from
datastore/GroupByKey: GroupByKey not followed by a combiner.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.582Z:
JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into optimizable parts.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.620Z:
JOB_MESSAGE_DETAILED: Lifting ValueCombiningMappingFns into
MergeBucketsMappingFns
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.793Z:
JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.849Z:
JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.884Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/SplitQuery into read
from datastore/UserQuery/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.923Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/GroupByKey/Reify into
read from datastore/SplitQuery
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:30.963Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/GroupByKey/Write into
read from datastore/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.007Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from
datastore/GroupByKey/GroupByWindow into read from datastore/GroupByKey/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.044Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/Values into read from
datastore/GroupByKey/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.085Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/Flatten into read
from datastore/Values
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.127Z:
JOB_MESSAGE_DETAILED: Fusing consumer read from datastore/Read into read from
datastore/Flatten
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.164Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/KeyWithVoid into read from
datastore/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.201Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Partial
into Globally/CombineGlobally(CountCombineFn)/KeyWithVoid
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.243Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Reify into
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Partial
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.280Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Write into
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.317Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine into
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.353Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Extract into
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.389Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/UnKey into
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Extract
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.426Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s19 for input s17.None
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.462Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
assert_that/Group/GroupByKey/Reify, through flatten assert_that/Group/Flatten,
into producer assert_that/Group/pair_with_0
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.497Z:
JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/GroupByKey/GroupByWindow into
assert_that/Group/GroupByKey/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.530Z:
JOB_MESSAGE_DETAILED: Fusing consumer
assert_that/Group/Map(_merge_tagged_vals_under_key) into
assert_that/Group/GroupByKey/GroupByWindow
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.566Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Unkey into
assert_that/Group/Map(_merge_tagged_vals_under_key)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.601Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Match into assert_that/Unkey
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.636Z:
JOB_MESSAGE_DETAILED: Unzipping flatten s19-u40 for input s20-reify-value9-c38
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.663Z:
JOB_MESSAGE_DETAILED: Fusing unzipped copy of
assert_that/Group/GroupByKey/Write, through flatten
assert_that/Group/Flatten/Unzipped-1, into producer
assert_that/Group/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.704Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/pair_with_0 into
assert_that/Create/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.740Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/GroupByKey/Reify into
assert_that/Group/pair_with_1
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.773Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/GroupByKey/Write into
assert_that/Group/GroupByKey/Reify
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.812Z:
JOB_MESSAGE_DETAILED: Fusing consumer
Globally/CombineGlobally(CountCombineFn)/InjectDefault/InjectDefault into
Globally/CombineGlobally(CountCombineFn)/DoOnce/Read
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.851Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/WindowInto(WindowIntoFn) into
Globally/CombineGlobally(CountCombineFn)/InjectDefault/InjectDefault
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.888Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/ToVoidKey into
assert_that/WindowInto(WindowIntoFn)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.928Z:
JOB_MESSAGE_DETAILED: Fusing consumer assert_that/Group/pair_with_1 into
assert_that/ToVoidKey
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.965Z:
JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:31.993Z:
JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.031Z:
JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.072Z:
JOB_MESSAGE_DEBUG: Assigning stage ids.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.255Z:
JOB_MESSAGE_DEBUG: Executing wait step start51
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.319Z:
JOB_MESSAGE_BASIC: Executing operation assert_that/Group/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.359Z:
JOB_MESSAGE_BASIC: Executing operation read from datastore/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.370Z:
JOB_MESSAGE_DEBUG: Starting worker pool setup.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.394Z:
JOB_MESSAGE_BASIC: Executing operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.405Z:
JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-c...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.455Z:
JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.470Z:
JOB_MESSAGE_BASIC: Finished operation read from datastore/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.470Z:
JOB_MESSAGE_BASIC: Finished operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Create
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.523Z:
JOB_MESSAGE_DEBUG: Value "assert_that/Group/GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.560Z:
JOB_MESSAGE_DEBUG: Value
"Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Session"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.645Z:
JOB_MESSAGE_DEBUG: Value "read from datastore/GroupByKey/Session" materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.687Z:
JOB_MESSAGE_BASIC: Executing operation
assert_that/Create/Read+assert_that/Group/pair_with_0+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:32.726Z:
JOB_MESSAGE_BASIC: Executing operation read from datastore/UserQuery/Read+read
from datastore/SplitQuery+read from datastore/GroupByKey/Reify+read from
datastore/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:27:51.556Z:
JOB_MESSAGE_WARNING: Your project already contains 100 Dataflow-created metric
descriptors and Stackdriver will not create new Dataflow custom metrics for
this job. Each unique user-defined metric name (independent of the DoFn in
which it is defined) produces a new metric descriptor. To delete old / unused
metric descriptors see
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list
and
https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:28:00.227Z:
JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 based on
the rate of progress in the currently running step(s).
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:29:14.458Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:29:14.492Z:
JOB_MESSAGE_DETAILED: Workers have started successfully.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:16.160Z:
JOB_MESSAGE_BASIC: Finished operation read from datastore/UserQuery/Read+read
from datastore/SplitQuery+read from datastore/GroupByKey/Reify+read from
datastore/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:16.271Z:
JOB_MESSAGE_BASIC: Executing operation read from datastore/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:16.328Z:
JOB_MESSAGE_BASIC: Finished operation read from datastore/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:16.396Z:
JOB_MESSAGE_BASIC: Executing operation read from datastore/GroupByKey/Read+read
from datastore/GroupByKey/GroupByWindow+read from datastore/Values+read from
datastore/Flatten+read from
datastore/Read+Globally/CombineGlobally(CountCombineFn)/KeyWithVoid+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Partial+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Reify+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:22.575Z:
JOB_MESSAGE_BASIC: Finished operation
assert_that/Create/Read+assert_that/Group/pair_with_0+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:29.223Z:
JOB_MESSAGE_BASIC: Finished operation read from datastore/GroupByKey/Read+read
from datastore/GroupByKey/GroupByWindow+read from datastore/Values+read from
datastore/Flatten+read from
datastore/Read+Globally/CombineGlobally(CountCombineFn)/KeyWithVoid+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Partial+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Reify+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:29.312Z:
JOB_MESSAGE_BASIC: Executing operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:29.366Z:
JOB_MESSAGE_BASIC: Finished operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:29.432Z:
JOB_MESSAGE_BASIC: Executing operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Read+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Extract+Globally/CombineGlobally(CountCombineFn)/UnKey
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:34.843Z:
JOB_MESSAGE_BASIC: Finished operation
Globally/CombineGlobally(CountCombineFn)/CombinePerKey/GroupByKey/Read+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine+Globally/CombineGlobally(CountCombineFn)/CombinePerKey/Combine/Extract+Globally/CombineGlobally(CountCombineFn)/UnKey
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:34.929Z:
JOB_MESSAGE_DEBUG: Value "Globally/CombineGlobally(CountCombineFn)/UnKey.out"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:34.992Z:
JOB_MESSAGE_BASIC: Executing operation
Globally/CombineGlobally(CountCombineFn)/InjectDefault/_UnpickledSideInput(UnKey.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:35.042Z:
JOB_MESSAGE_BASIC: Finished operation
Globally/CombineGlobally(CountCombineFn)/InjectDefault/_UnpickledSideInput(UnKey.out.0)
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:35.096Z:
JOB_MESSAGE_DEBUG: Value
"Globally/CombineGlobally(CountCombineFn)/InjectDefault/_UnpickledSideInput(UnKey.out.0).output"
materialized.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:35.169Z:
JOB_MESSAGE_BASIC: Executing operation
Globally/CombineGlobally(CountCombineFn)/DoOnce/Read+Globally/CombineGlobally(CountCombineFn)/InjectDefault/InjectDefault+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/pair_with_1+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:39.942Z:
JOB_MESSAGE_BASIC: Finished operation
Globally/CombineGlobally(CountCombineFn)/DoOnce/Read+Globally/CombineGlobally(CountCombineFn)/InjectDefault/InjectDefault+assert_that/WindowInto(WindowIntoFn)+assert_that/ToVoidKey+assert_that/Group/pair_with_1+assert_that/Group/GroupByKey/Reify+assert_that/Group/GroupByKey/Write
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:40.016Z:
JOB_MESSAGE_BASIC: Executing operation assert_that/Group/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:40.070Z:
JOB_MESSAGE_BASIC: Finished operation assert_that/Group/GroupByKey/Close
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:40.149Z:
JOB_MESSAGE_BASIC: Executing operation
assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:49.559Z:
JOB_MESSAGE_BASIC: Finished operation
assert_that/Group/GroupByKey/Read+assert_that/Group/GroupByKey/GroupByWindow+assert_that/Group/Map(_merge_tagged_vals_under_key)+assert_that/Unkey+assert_that/Match
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:49.732Z:
JOB_MESSAGE_DEBUG: Executing success step success49
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:50.040Z:
JOB_MESSAGE_DETAILED: Cleaning up.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:50.120Z:
JOB_MESSAGE_DEBUG: Starting worker pool teardown.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:32:50.158Z:
JOB_MESSAGE_BASIC: Stopping worker pool...
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:34:11.367Z:
JOB_MESSAGE_DETAILED: Autoscaling: Resized worker pool from 1 to 0.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:34:11.417Z:
JOB_MESSAGE_BASIC: Worker pool stopped.
INFO:apache_beam.runners.dataflow.dataflow_runner:2020-02-28T13:34:11.459Z:
JOB_MESSAGE_DEBUG: Tearing down pending resources...
INFO:apache_beam.runners.dataflow.dataflow_runner:Job
2020-02-28_05_27_22-3449477874967220235 is in state JOB_STATE_DONE
test_datastore_write_limit
(apache_beam.io.gcp.datastore_write_it_test.DatastoreWriteIT) ... ok
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df.xml
----------------------------------------------------------------------
XML:
<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 54 tests in 3466.859s
OK (SKIP=7)
FAILURE: Build failed with an exception.
* Where:
Build file
'<https://builds.apache.org/job/beam_PostCommit_Python2/ws/src/sdks/python/test-suites/portable/py2/build.gradle>'
line: 143
* What went wrong:
Execution failed for task
':sdks:python:test-suites:portable:py2:crossLanguagePortableWordCount'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 59m 24s
124 actionable tasks: 98 executed, 23 from cache, 3 up-to-date
Publishing build scan...
Publishing failed.
The response from https://scans-in.gradle.com/in/5.2.1/2.3 was not from the
build scan server.
Your network environment may be interfering, or the service may be unavailable.
If you believe this to be in error, please report this problem via
https://gradle.com/scans/help/plugin and include the following via copy/paste:
----------
Gradle version: 5.2.1
Plugin version: 2.3
Request URL: https://scans-in.gradle.com/in/5.2.1/2.3
Request ID: 80bf44ed-b67b-4b39-b02f-4b915062ba52
Response status code: 413
Response content type: text/html
Response server type: cloudflare
----------
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]