See
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Flink/118/display/redirect?page=changes>
Changes:
[noreply] Merge pull request #16976 from [BEAM-14010] [Website] Add Playground
[noreply] [BEAM-12447] Upgrade cloud build client and add/cleanup options
(#17032)
------------------------------------------
[...truncated 19.50 MB...]
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:ptransform_execution_time:total_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "fn/write/ref_PCollection_PCollection_16:0"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:ptransform_execution_time:total_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "fn/read/pcollection_4:0"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:ptransform_execution_time:total_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "TopPerPrefix/LargestPerKey(5)/TopPerKey(5)/CombinePerKey(TopCombineFn)/ExtractOutputs"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:pardo_execution_time:start_bundle_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "fn/read/pcollection_4:0"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:element_count:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "Bw==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "pcollection_5"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:pardo_execution_time:start_bundle_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "TopPerPrefix/LargestPerKey(5)/TopPerKey(5)/CombinePerKey(TopCombineFn)/ExtractOutputs"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:pardo_execution_time:start_bundle_msecs:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:sum_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "AA==",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PTRANSFORM": "fn/write/ref_PCollection_PCollection_16:0"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:sampled_byte_size:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:distribution_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "B4ECHTY=",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "ref_PCollection_PCollection_11"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:sampled_byte_size:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:distribution_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "B74CJEU=",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "pcollection_4"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:sampled_byte_size:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:distribution_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "B/gBHTA=",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "ref_PCollection_PCollection_10"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:sampled_byte_size:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:distribution_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "B4YCHzI=",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "pcollection_5"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }, {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "urn": "beam:metric:sampled_byte_size:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "type": "beam:metrics:distribution_int64:v1",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "payload": "B4ECHTY=",'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "labels": {'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' "PCOLLECTION": "ref_PCollection_PCollection_16"'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }]'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b' }'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'}'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.taskexecutor.TaskExecutor
handleOnStopException'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopped TaskExecutor akka://flink/user/rpc/taskmanager_0.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.rpc.akka.AkkaRpcService stopService'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopping Akka RPC service.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.rpc.akka.AkkaRpcService stopService'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopping Akka RPC service.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.rpc.akka.AkkaRpcService
lambda$stopService$8'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopped Akka RPC service.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.blob.AbstractBlobCache close'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Shutting down BLOB cache'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.blob.AbstractBlobCache close'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Shutting down BLOB cache'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.blob.BlobServer close'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopped BLOB server at 0.0.0.0:44403'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:02 AM org.apache.flink.runtime.rpc.akka.AkkaRpcService
lambda$stopService$8'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Stopped Akka RPC service.'
INFO apache_beam.io.gcp.gcsio:gcsio.py:575 Finished listing 7 files in
0.061466217041015625 seconds.
PASSED [ 17%]
apache_beam/examples/complete/distribopt_test.py::DistribOptimizationTest::test_basics
-------------------------------- live log call ---------------------------------
INFO root:distribopt_test.py:48 Creating temp file:
/tmp/tmp1lqjgc8q/input.txt
INFO root:pipeline.py:188 Missing pipeline option (runner). Executing
pipeline using the default runner: DirectRunner.
WARNING root:environments.py:374 Make sure that locally built Python SDK
docker image has Python 3.7 interpreter.
INFO root:environments.py:380 Default Python SDK image for environment is
apache/beam_python3.7_sdk:2.38.0.dev
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function annotate_downstream_side_inputs at
0x7f146c64e3b0> ====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function fix_side_input_pcoll_coders at 0x7f146c64e4d0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function pack_combiners at 0x7f146c64e9e0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function lift_combiners at 0x7f146c64ea70>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function expand_sdf at 0x7f146c64ec20>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function expand_gbk at 0x7f146c64ecb0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function sink_flattens at 0x7f146c64edd0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function greedily_fuse at 0x7f146c64ee60>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function read_to_impulse at 0x7f146c64eef0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function impulse_to_input at 0x7f146c64ef80>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function sort_stages at 0x7f146c653200>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function setup_timer_mapping at 0x7f146c653170>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function populate_data_channel_coders at 0x7f146c653290>
====================
INFO apache_beam.runners.worker.statecache:statecache.py:172 Creating state
cache with size 100
INFO
apache_beam.runners.portability.fn_api_runner.worker_handlers:worker_handlers.py:894
Created Worker handler
<apache_beam.runners.portability.fn_api_runner.worker_handlers.EmbeddedWorkerHandler
object at 0x7f143e231290> for environment
ref_Environment_default_environment_1 (beam:env:embedded_python:v1, b'')
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
((((ref_AppliedPTransform_read-Read-Impulse_4)+(ref_AppliedPTransform_read-Read-Map-lambda-at-iobase-py-898-_5))+(read/Read/SDFBoundedSourceReader/ParDo(SDFBoundedSourceDoFn)/PairWithRestriction))+(read/Read/SDFBoundedSourceReader/ParDo(SDFBoundedSourceDoFn)/SplitAndSizeRestriction))+(ref_PCollection_PCollection_2_split/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
((((((((ref_PCollection_PCollection_2_split/Read)+(read/Read/SDFBoundedSourceReader/ParDo(SDFBoundedSourceDoFn)/Process))+(ref_AppliedPTransform_process-input_8))+(ref_AppliedPTransform_create-transport_9))+(ref_AppliedPTransform_create-quantities_10))+(ref_AppliedPTransform_CreateGrid-pair-one_12))+(ref_PCollection_PCollection_5/Write))+(ref_PCollection_PCollection_6/Write))+(CreateGrid/group
all records/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running (((CreateGrid/group all
records/Read)+(ref_AppliedPTransform_CreateGrid-split-one-of-ParDo-PreGenerateMappings-_15))+(ref_PCollection_PCollection_10/Write))+(ref_PCollection_PCollection_11/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
((((ref_PCollection_PCollection_11/Read)+(ref_AppliedPTransform_CreateGrid-create-mappings_16))+(ref_AppliedPTransform_CreateGrid-prevent-fusion-AddRandomKeys_18))+(ref_AppliedPTransform_CreateGrid-prevent-fusion-ReshufflePerKey-Map-reify_timestamps-_20))+(CreateGrid/prevent
fusion/ReshufflePerKey/GroupByKey/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
(((((ref_AppliedPTransform_write-optimum-Write-WriteImpl-DoOnce-Impulse_55)+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-DoOnce-FlatMap-lambda-at-core-py-3229-_56))+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-DoOnce-Map-decode-_58))+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-InitializeWrite_59))+(ref_PCollection_PCollection_40/Write))+(ref_PCollection_PCollection_41/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running ((((((((((((((CreateGrid/prevent
fusion/ReshufflePerKey/GroupByKey/Read)+(ref_AppliedPTransform_CreateGrid-prevent-fusion-ReshufflePerKey-FlatMap-restore_timestamps-_22))+(ref_AppliedPTransform_CreateGrid-prevent-fusion-RemoveRandomKeys_23))+(ref_AppliedPTransform_OptimizeGrid-optimization-tasks_25))+(ref_AppliedPTransform_join-CoGroupByKeyImpl-Tag-2-_38))+(ref_AppliedPTransform_OptimizeGrid-optimize-ParDo-OptimizeProductParameters-_27))+(ref_AppliedPTransform_include-transport_28))+(ref_AppliedPTransform_join-CoGroupByKeyImpl-Tag-1-_37))+(ref_AppliedPTransform_drop-crop-and-greenhouse_29))+(aggregate
crops/Precombine))+(aggregate
crops/Group/Write))+(join/CoGroupByKeyImpl/Flatten/Transcode/1))+(join/CoGroupByKeyImpl/Flatten/Transcode/2))+(join/CoGroupByKeyImpl/Flatten/Write/2))+(join/CoGroupByKeyImpl/Flatten/Write/1)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running (((((aggregate crops/Group/Read)+(aggregate crops/Merge))+(aggregate
crops/ExtractOutputs))+(ref_AppliedPTransform_join-CoGroupByKeyImpl-Tag-0-_36))+(join/CoGroupByKeyImpl/Flatten/Transcode/0))+(join/CoGroupByKeyImpl/Flatten/Write/0)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
(join/CoGroupByKeyImpl/Flatten/Read)+(join/CoGroupByKeyImpl/GroupByKey/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
(((((join/CoGroupByKeyImpl/GroupByKey/Read)+(ref_AppliedPTransform_join-CoGroupByKeyImpl-MapTuple-collect_values-_41))+(ref_AppliedPTransform_join-RestoreTags_42))+(ref_AppliedPTransform_select-best-KeyWithVoid_44))+(select
best/CombinePerKey/Precombine))+(select best/CombinePerKey/Group/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running ((((((((select best/CombinePerKey/Group/Read)+(select
best/CombinePerKey/Merge))+(select
best/CombinePerKey/ExtractOutputs))+(ref_AppliedPTransform_select-best-UnKey_49))+(ref_AppliedPTransform_format-output_50))+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-WindowInto-WindowIntoFn-_60))+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-WriteBundles_61))+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-Pair_62))+(write
optimum/Write/WriteImpl/GroupByKey/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running ((write
optimum/Write/WriteImpl/GroupByKey/Read)+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-Extract_64))+(ref_PCollection_PCollection_46/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
((ref_PCollection_PCollection_40/Read)+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-PreFinalize_65))+(ref_PCollection_PCollection_47/Write)
INFO
apache_beam.runners.portability.fn_api_runner.fn_runner:fn_runner.py:621
Running
(ref_PCollection_PCollection_40/Read)+(ref_AppliedPTransform_write-optimum-Write-WriteImpl-FinalizeWrite_66)
INFO apache_beam.io.filebasedsink:filebasedsink.py:303 Starting
finalize_write threads with num_shards: 1 (skipped: 0), batches: 1,
num_threads: 1
INFO apache_beam.io.filebasedsink:filebasedsink.py:348 Renamed 1 shards in
0.10 seconds.
PASSED [ 21%]
apache_beam/examples/complete/estimate_pi_it_test.py::EstimatePiIT::test_estimate_pi_output_file
-------------------------------- live log call ---------------------------------
INFO apache_beam.runners.worker.worker_pool_main:worker_pool_main.py:93
Listening for workers at localhost:40531
WARNING root:environments.py:374 Make sure that locally built Python SDK
docker image has Python 3.7 interpreter.
INFO root:environments.py:380 Default Python SDK image for environment is
apache/beam_python3.7_sdk:2.38.0.dev
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function pack_combiners at 0x7f146c64e9e0>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function lift_combiners at 0x7f146c64ea70>
====================
INFO
apache_beam.runners.portability.fn_api_runner.translations:translations.py:678
==================== <function sort_stages at 0x7f146c653200>
====================
INFO apache_beam.utils.subprocess_server:subprocess_server.py:116 Starting
service with ['java' '-jar'
'<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Flink/ws/src/runners/flink/1.14/job-server/build/libs/beam-runners-flink-1.14-job-server-2.38.0-SNAPSHOT.jar'>
'--flink-master' '[auto]' '--artifacts-dir'
'/tmp/beam-tempni4lhj1e/artifactsg73x4__p' '--job-port' '58229'
'--artifact-port' '0' '--expansion-port' '0']
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM software.amazon.awssdk.regions.internal.util.EC2MetadataUtils
getItems'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125
b'WARNING: Unable to retrieve the requested metadata.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM org.apache.beam.runners.jobsubmission.JobServerDriver
createArtifactStagingService'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
ArtifactStagingService started on localhost:46157'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM org.apache.beam.runners.jobsubmission.JobServerDriver
createExpansionService'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Java ExpansionService started on localhost:44499'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM org.apache.beam.runners.jobsubmission.JobServerDriver
createJobServer'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
JobService started on localhost:58229'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:08 AM org.apache.beam.runners.jobsubmission.JobServerDriver run'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Job server now running, terminate with Ctrl+C'
WARNING root:subprocess_server.py:98 Waiting for grpc channel to be ready at
localhost:58229.
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:10 AM
org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService$2 onNext'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Staging artifacts for job_89ddb44f-8270-457d-bc4a-b3e87ef3ded0.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:10 AM
org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService$2
resolveNextEnvironment'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Resolving artifacts for
job_89ddb44f-8270-457d-bc4a-b3e87ef3ded0.ref_Environment_default_environment_1.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:10 AM
org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService$2 onNext'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Getting 1 artifacts for job_89ddb44f-8270-457d-bc4a-b3e87ef3ded0.null.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:11 AM
org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService$2
finishStaging'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Artifacts fully staged for job_89ddb44f-8270-457d-bc4a-b3e87ef3ded0.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM org.apache.beam.runners.flink.FlinkJobInvoker
invokeWithExecutor'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Invoking job
BeamApp-jenkins-0309060711-48287647_76984573-e2a0-417f-b22a-21d0844f87dc with
pipeline runner org.apache.beam.runners.flink.FlinkPipelineRunner@24ecac4f'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM org.apache.beam.runners.jobsubmission.JobInvocation start'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Starting job invocation
BeamApp-jenkins-0309060711-48287647_76984573-e2a0-417f-b22a-21d0844f87dc'
INFO apache_beam.runners.portability.portable_runner:portable_runner.py:457
Environment "LOOPBACK" has started a component necessary for the execution. Be
sure to run the pipeline using
  with Pipeline() as p:
    p.apply(..)
This ensures that the pipeline finishes before this program exits.
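The snippet embedded in that warning is only a stub. A minimal sketch of the
recommended pattern, assuming the portable job server and port reported
earlier in this log (localhost:58229) and an illustrative Create/Map pipeline,
would be:

# Minimal sketch (not from this log): run the pipeline as a context manager
# so the program blocks until the job finishes, as the LOOPBACK warning above
# recommends. The job endpoint/port are taken from the log; the Create/Map
# transforms are illustrative only.
import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions

options = PipelineOptions([
    "--runner=PortableRunner",
    "--job_endpoint=localhost:58229",   # job server port reported above
    "--environment_type=LOOPBACK",
])

with beam.Pipeline(options=options) as p:
    (p
     | "Create" >> beam.Create(["a", "b", "c"])
     | "Print" >> beam.Map(print))
# Leaving the with-block runs the pipeline and waits for its result, which is
# what keeps the LOOPBACK worker available until the job completes.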
INFO apache_beam.runners.portability.portable_runner:portable_runner.py:576
Job state changed to STOPPED
INFO apache_beam.runners.portability.portable_runner:portable_runner.py:576
Job state changed to STARTING
INFO apache_beam.runners.portability.portable_runner:portable_runner.py:576
Job state changed to RUNNING
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM org.apache.beam.runners.flink.FlinkPipelineRunner
runPipelineWithTranslator'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Translating pipeline to Flink program.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM org.apache.beam.runners.flink.FlinkExecutionEnvironments
createBatchExecutionEnvironment'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'INFO:
Creating a Batch Execution Environment.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM software.amazon.awssdk.regions.internal.util.EC2MetadataUtils
getItems'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125
b'WARNING: Unable to retrieve the requested metadata.'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:12 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b"INFO:
The AWS S3 Beam extension was included in this build, but the awsRegion flag
was not specified. If you don't plan to use S3, then ignore this message."
INFO apache_beam.utils.subprocess_server:subprocess_server.py:125 b'Mar 09,
2022 6:07:13 AM org.apache.beam.sdk.io.aws2.s3.DefaultS3ClientBuilderFactory
createBuilder'
FAILURE: Build failed with an exception.
* What went wrong:
GC overhead limit exceeded
* Try:
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
* Exception is:
java.lang.OutOfMemoryError: GC overhead limit exceeded
* Get more help at https://help.gradle.org
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
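A hedged note on the failure above (not taken from this log): "GC overhead
limit exceeded" is the java.lang.OutOfMemoryError the JVM raises when garbage
collection consumes nearly all execution time while reclaiming little memory;
here it aborted the Gradle invocation driving the test run. If the failure is
reproducible, a common first mitigation is to give the relevant JVM more heap,
for example by raising org.gradle.jvmargs (e.g. a larger -Xmx value) in
gradle.properties for the Gradle daemon; whether the limit was actually hit in
Gradle itself or in one of the spawned JVMs (such as the Flink job server)
would need to be confirmed from the full console log.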