See <https://builds.apache.org/job/beam_PostCommit_Python_Verify/2316/display/redirect?page=changes>
Changes:
[iemejia] Update maven-dependency-plugin to version 3.0.1
------------------------------------------
[...truncated 576.03 KB...]
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:windowed_value",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                      "component_encodings": [
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                          "component_encodings": []
                        },
                        {
                          "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                          "component_encodings": []
                        }
                      ],
                      "is_pair_like": true
                    },
                    {
                      "@type": "kind:global_window"
                    }
                  ],
                  "is_wrapper": true
                }
              ]
            },
            "output_name": "out",
            "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16.output"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s14"
        },
        "user_name": "write/Write/WriteImpl/FinalizeWrite/SideInput-s16"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s17",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "_finalize_write"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          }
        ],
        "non_parallel_inputs": {
          "SideInput-s15": {
            "@type": "OutputReference",
            "output_name": "out",
            "step_name": "SideInput-s15"
          },
          "SideInput-s16": {
            "@type": "OutputReference",
            "output_name": "out",
            "step_name": "SideInput-s16"
          }
        },
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                      "component_encodings": []
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYEpOLEhMzkiNT0pNzNVLzk9JLSqGUlxuicUlAUWZuZklmWWpxc4gQa5CBs3GQsbaQqZQ/vi0xJycpMTk7Hiw+kJmPEYFZCZn56RCjWABGsFaW8iWVJykBwDlGS3/",
                      "component_encodings": []
                    }
                  ],
                  "is_pair_like": true
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "out",
            "user_name": "write/Write/WriteImpl/FinalizeWrite.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s7"
        },
        "serialized_fn": "<string of 1056 bytes>",
        "user_name": "write/Write/WriteImpl/FinalizeWrite/Do"
      }
    }
  ],
  "type": "JOB_TYPE_BATCH"
}
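The "non_parallel_inputs" entries above are how Dataflow represents Beam side inputs: each SideInput-sNN step is a PCollection consumed out-of-band by a ParDo. As a minimal sketch of how such a step arises in the Python SDK (names and values here are illustrative, not taken from the failing test):

    # A PCollection wrapped in AsSingleton and passed as an extra ParDo
    # argument becomes a "non_parallel_inputs" OutputReference like the
    # SideInput-s15/SideInput-s16 entries in the job JSON above.
    import apache_beam as beam
    from apache_beam import pvalue

    with beam.Pipeline() as p:
        main = p | 'main' >> beam.Create(['a', 'b', 'c'])
        side = p | 'side' >> beam.Create([2])
        # The lambda's extra argument n is bound to the side input value
        # at runtime.
        result = main | 'scale' >> beam.Map(
            lambda elem, n: elem * n, n=pvalue.AsSingleton(side))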
root: INFO: Create job: <Job
createTime: u'2017-05-25T17:56:08.949185Z'
currentStateTime: u'1970-01-01T00:00:00Z'
id: u'2017-05-25_10_56_08-15254750624546520082'
location: u'global'
name: u'beamapp-jenkins-0525175607-304078'
projectId: u'apache-beam-testing'
stageStates: []
steps: []
tempFiles: []
type: TypeValueValuesEnum(JOB_TYPE_BATCH, 1)>
root: INFO: Created job with id: [2017-05-25_10_56_08-15254750624546520082]
root: INFO: To access the Dataflow monitoring console, please navigate to
https://console.developers.google.com/project/apache-beam-testing/dataflow/job/2017-05-25_10_56_08-15254750624546520082
root: INFO: Job 2017-05-25_10_56_08-15254750624546520082 is in state
JOB_STATE_RUNNING
root: INFO: 2017-05-25T17:56:08.374Z: JOB_MESSAGE_WARNING: (d3b3c2deb1437c0e):
Setting the number of workers (1) disables autoscaling for this job. If you are
trying to cap autoscaling, consider only setting max_num_workers. If you want
to disable autoscaling altogether, the documented way is to explicitly use
autoscalingAlgorithm=NONE.
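As a hedged sketch of the two configurations the warning contrasts (these are standard Beam Python worker options; the values and the module path, current as of Beam 2.x, are illustrative rather than taken from this job's command line):

    from apache_beam.options.pipeline_options import PipelineOptions

    # To cap autoscaling, set only a ceiling on the worker pool:
    capped = PipelineOptions(['--max_num_workers=4'])

    # To disable autoscaling altogether, as the message recommends:
    pinned = PipelineOptions(
        ['--autoscaling_algorithm=NONE', '--num_workers=1'])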
root: INFO: 2017-05-25T17:56:10.568Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d068):
Checking required Cloud APIs are enabled.
root: INFO: 2017-05-25T17:56:11.791Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1de57):
Combiner lifting skipped for step write/Write/WriteImpl/GroupByKey: GroupByKey
not followed by a combiner.
root: INFO: 2017-05-25T17:56:11.793Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d699):
Combiner lifting skipped for step group: GroupByKey not followed by a combiner.
root: INFO: 2017-05-25T17:56:11.796Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dedb):
Expanding GroupByKey operations into optimizable parts.
root: INFO: 2017-05-25T17:56:11.798Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d71d):
Lifting ValueCombiningMappingFns into MergeBucketsMappingFns
root: INFO: 2017-05-25T17:56:11.804Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1dfe3):
Annotating graph with Autotuner information.
root: INFO: 2017-05-25T17:56:11.817Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d067):
Fusing adjacent ParDo, Read, Write, and Flatten operations
root: INFO: 2017-05-25T17:56:11.819Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d8a9):
Fusing consumer split into read/Read
root: INFO: 2017-05-25T17:56:11.821Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d0eb):
Fusing consumer group/Write into group/Reify
root: INFO: 2017-05-25T17:56:11.823Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d92d):
Fusing consumer group/GroupByWindow into group/Read
root: INFO: 2017-05-25T17:56:11.825Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d16f):
Fusing consumer write/Write/WriteImpl/GroupByKey/GroupByWindow into
write/Write/WriteImpl/GroupByKey/Read
root: INFO: 2017-05-25T17:56:11.827Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d9b1):
Fusing consumer write/Write/WriteImpl/GroupByKey/Write into
write/Write/WriteImpl/GroupByKey/Reify
root: INFO: 2017-05-25T17:56:11.832Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1da35):
Fusing consumer write/Write/WriteImpl/WindowInto(WindowIntoFn) into
write/Write/WriteImpl/Pair
root: INFO: 2017-05-25T17:56:11.834Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d277):
Fusing consumer write/Write/WriteImpl/GroupByKey/Reify into
write/Write/WriteImpl/WindowInto(WindowIntoFn)
root: INFO: 2017-05-25T17:56:11.836Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dab9):
Fusing consumer pair_with_one into split
root: INFO: 2017-05-25T17:56:11.838Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d2fb):
Fusing consumer group/Reify into pair_with_one
root: INFO: 2017-05-25T17:56:11.840Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1db3d):
Fusing consumer write/Write/WriteImpl/WriteBundles/Do into format
root: INFO: 2017-05-25T17:56:11.842Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d37f):
Fusing consumer write/Write/WriteImpl/Pair into
write/Write/WriteImpl/WriteBundles/Do
root: INFO: 2017-05-25T17:56:11.845Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dbc1):
Fusing consumer format into count
root: INFO: 2017-05-25T17:56:11.847Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1d403):
Fusing consumer write/Write/WriteImpl/Extract into
write/Write/WriteImpl/GroupByKey/GroupByWindow
root: INFO: 2017-05-25T17:56:11.850Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dc45):
Fusing consumer count into group/GroupByWindow
root: INFO: 2017-05-25T17:56:11.859Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dd4d):
Fusing consumer write/Write/WriteImpl/InitializeWrite into
write/Write/WriteImpl/DoOnce/Read
root: INFO: 2017-05-25T17:56:11.931Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d509):
Workflow config is missing a default resource spec.
root: INFO: 2017-05-25T17:56:11.935Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1dd4b):
Adding StepResource setup and teardown to workflow graph.
root: INFO: 2017-05-25T17:56:11.938Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d58d):
Adding workflow start and stop steps.
root: INFO: 2017-05-25T17:56:11.942Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1ddcf):
Assigning stage ids.
root: INFO: 2017-05-25T17:56:11.982Z: JOB_MESSAGE_DEBUG: (7a4c9a7af0e47d65):
Executing wait step start25
root: INFO: 2017-05-25T17:56:11.992Z: JOB_MESSAGE_BASIC: (7a4c9a7af0e47353):
Executing operation
write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
root: INFO: 2017-05-25T17:56:11.995Z: JOB_MESSAGE_BASIC: (ce3f979b8433dae4):
Executing operation group/Create
root: INFO: 2017-05-25T17:56:12.197Z: JOB_MESSAGE_DEBUG: (43e85710b9452efe):
Starting worker pool setup.
root: INFO: 2017-05-25T17:56:12.199Z: JOB_MESSAGE_BASIC: (43e85710b9452694):
Starting 1 workers...
root: INFO: 2017-05-25T17:56:12.213Z: JOB_MESSAGE_DEBUG: (ce3f979b8433dc81):
Value "group/Session" materialized.
root: INFO: 2017-05-25T17:56:12.224Z: JOB_MESSAGE_BASIC: (ce3f979b8433dc29):
Executing operation read/Read+split+pair_with_one+group/Reify+group/Write
root: INFO: 2017-05-25T17:58:27.159Z: JOB_MESSAGE_DETAILED: (16fafad61e9ffc7a):
Workers have started successfully.
root: INFO: 2017-05-25T17:59:40.820Z: JOB_MESSAGE_ERROR: (8bb4a045e88fffdf):
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos
root: INFO: 2017-05-25T17:59:42.923Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff147):
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos
root: INFO: 2017-05-25T17:59:44.985Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff2af):
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos
root: INFO: 2017-05-25T17:59:47.040Z: JOB_MESSAGE_ERROR: (8bb4a045e88ff417):
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 706, in run
    self._load_main_session(self.local_staging_directory)
  File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 446, in _load_main_session
    pickler.load_session(session_file)
  File "/usr/local/lib/python2.7/dist-packages/apache_beam/internal/pickler.py", line 247, in load_session
    return dill.load_session(file_path)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 363, in load_session
    module = unpickler.load()
  File "/usr/lib/python2.7/pickle.py", line 858, in load
    dispatch[key](self)
  File "/usr/lib/python2.7/pickle.py", line 1133, in load_reduce
    value = func(*args)
  File "/usr/local/lib/python2.7/dist-packages/dill/dill.py", line 766, in _import_module
    return __import__(import_name)
ImportError: No module named gen_protos
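The four identical errors above are a main-session load failure: when a pipeline runs with --save_main_session, the submitting interpreter's __main__ namespace is pickled with dill and reloaded on every worker, so every module it references must also be importable on the worker. gen_protos exists only in the Beam source tree, not in the worker image, hence the ImportError on each attempt. A minimal sketch of the mechanism (file and module names are illustrative, not the worker code itself):

    # dill serializes the session by module *name*; unpickling re-imports.
    import dill

    import local_only_helper            # stands in for gen_protos
    dill.dump_session('session.pkl')    # on the submitting machine

    # On a worker where local_only_helper is not on sys.path:
    dill.load_session('session.pkl')    # ImportError: No module named ...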
root: INFO: 2017-05-25T17:59:47.431Z: JOB_MESSAGE_DEBUG: (7a4c9a7af0e47bc0):
Executing failure step failure24
root: INFO: 2017-05-25T17:59:47.434Z: JOB_MESSAGE_ERROR: (7a4c9a7af0e47dba):
Workflow failed. Causes: (7a4c9a7af0e477cc):
S01:write/Write/WriteImpl/DoOnce/Read+write/Write/WriteImpl/InitializeWrite
failed., (bf54651899a3d481): Failed to split source.
root: INFO: 2017-05-25T17:59:47.508Z: JOB_MESSAGE_DETAILED: (96fbd1da3af1db3a):
Cleaning up.
root: INFO: 2017-05-25T17:59:47.511Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d37c):
Starting worker pool teardown.
root: INFO: 2017-05-25T17:59:47.514Z: JOB_MESSAGE_BASIC: (96fbd1da3af1dbbe):
Stopping worker pool...
root: INFO: 2017-05-25T18:01:07.524Z: JOB_MESSAGE_BASIC: (96fbd1da3af1d821):
Worker pool stopped.
root: INFO: 2017-05-25T18:01:07.595Z: JOB_MESSAGE_DEBUG: (96fbd1da3af1d929):
Tearing down pending resources...
root: INFO: Job 2017-05-25_10_56_08-15254750624546520082 is in state
JOB_STATE_FAILED
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
Ran 2 tests in 393.584s
FAILED (errors=1)
Found:
https://console.cloud.google.com/dataflow/job/2017-05-25_10_56_07-14183265070979746962?project=apache-beam-testing
Found:
https://console.cloud.google.com/dataflow/job/2017-05-25_10_56_08-15254750624546520082?project=apache-beam-testing
Build step 'Execute shell' marked build as failure