See
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/26/display/redirect>
Changes:
------------------------------------------
[...truncated 55.47 KB...]
[1m NUM_GROUPS = 3[0m
[1m [0m
[1m extra_opts = {[0m
[1m 'output': self.output,[0m
[1m 'num_groups': str(NUM_GROUPS),[0m
[1m 'on_success_matcher': all_of(state_verifier)[0m
[1m }[0m
[1m [0m
[1m # Register clean up before pipeline execution[0m
[1m self.addCleanup(delete_files, [self.output + '*'])[0m
[1m [0m
[1m # Get pipeline options from command argument:
--test-pipeline-options,[0m
[1m # and start pipeline job by calling pipeline main function.[0m
[1m> bigquery_side_input.run([0m
[1m self.test_pipeline.get_full_options_as_args(**extra_opts))[0m
[1m[31mapache_beam/examples/cookbook/bigquery_side_input_it_test.py[0m:62:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
[1m[31mapache_beam/examples/cookbook/bigquery_side_input.py[0m:115: in run
[1m pcoll_groups | WriteToText(known_args.output)[0m
[1m[31mapache_beam/pipeline.py[0m:596: in __exit__
[1m self.result = self.run()[0m
[1m[31mapache_beam/pipeline.py[0m:546: in run
[1m return Pipeline.from_runner_api([0m
[1m[31mapache_beam/pipeline.py[0m:570: in run
[1m pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))[0m
[1m[31mapache_beam/internal/pickler.py[0m:61: in dump_session
[1m return desired_pickle_lib.dump_session(file_path)[0m
[1m[31mapache_beam/internal/dill_pickler.py[0m:307: in dump_session
[1m dill.load_session(file_path)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py[0m:368:
in load_session
[1m module = unpickler.load()[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py[0m:472:
in load
[1m obj = StockUnpickler.load(self)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
E   RecursionError: maximum recursion depth exceeded
!!! Recursion detected (same locals & position)
------------------------------ Captured log call -------------------------------
[32mINFO [0m apache_beam.internal.gcp.auth:auth.py:105 Setting socket
default timeout to 60 seconds.
[32mINFO [0m apache_beam.internal.gcp.auth:auth.py:107 socket default
timeout is 60.0 seconds.
[32mINFO [0m apache_beam.io.gcp.gcsio:gcsio.py:559 Starting the size
estimation of the input
[32mINFO [0m oauth2client.transport:transport.py:157 Attempting refresh to
obtain initial access_token
[32mINFO [0m apache_beam.io.gcp.gcsio:gcsio.py:572 Finished listing 0
files in 0.06420087814331055 seconds.
___ HourlyTeamScoreIT.test_hourly_team_score_output_checksum_on_small_input ____
[gw2] linux -- Python 3.8.10
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/bin/python3.8>
self =
<apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT
testMethod=test_hourly_team_score_output_checksum_on_small_input>
[1m @pytest.mark.examples_postcommit[0m
[1m def test_hourly_team_score_output_checksum_on_small_input(self):[0m
[1m # Small dataset to prevent Out of Memory when running in local
runners[0m
[1m INPUT_FILE = 'gs://apache-beam-samples/game/small/gaming_data.csv'[0m
[1m EXPECTED_CHECKSUM = '91143e81622aa391eb62eaa3f3a5123401edb07d'[0m
[1m state_verifier = PipelineStateMatcher(PipelineState.DONE)[0m
[1m query = ([0m
[1m 'SELECT COUNT(*) FROM `%s.%s.%s`' %[0m
[1m (self.project, self.dataset_ref.dataset_id,
self.OUTPUT_TABLE))[0m
[1m [0m
[1m bigquery_verifier = BigqueryMatcher(self.project, query,
EXPECTED_CHECKSUM)[0m
[1m [0m
[1m extra_opts = {[0m
[1m 'input': INPUT_FILE,[0m
[1m 'dataset': self.dataset_ref.dataset_id,[0m
[1m 'window_duration': 1,[0m
[1m 'on_success_matcher': all_of(state_verifier,
bigquery_verifier)[0m
[1m }[0m
[1m [0m
[1m # Register clean up before pipeline execution[0m
[1m # Note that actual execution happens in reverse order.[0m
[1m self.addCleanup(utils.delete_bq_dataset, self.project,
self.dataset_ref)[0m
[1m [0m
[1m # Get pipeline options from command argument:
--test-pipeline-options,[0m
[1m # and start pipeline job by calling pipeline main function.[0m
[1m> hourly_team_score.run([0m
[1m self.test_pipeline.get_full_options_as_args(**extra_opts))[0m
[1m[31mapache_beam/examples/complete/game/hourly_team_score_it_test.py[0m:118:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
[1m[31mapache_beam/examples/complete/game/hourly_team_score.py[0m:299: in run
[1m ( # pylint: disable=expression-not-assigned[0m
[1m[31mapache_beam/pipeline.py[0m:596: in __exit__
[1m self.result = self.run()[0m
[1m[31mapache_beam/pipeline.py[0m:546: in run
[1m return Pipeline.from_runner_api([0m
[1m[31mapache_beam/pipeline.py[0m:570: in run
[1m pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))[0m
[1m[31mapache_beam/internal/pickler.py[0m:61: in dump_session
[1m return desired_pickle_lib.dump_session(file_path)[0m
[1m[31mapache_beam/internal/dill_pickler.py[0m:307: in dump_session
[1m dill.load_session(file_path)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py[0m:368:
in load_session
[1m module = unpickler.load()[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py[0m:472:
in load
[1m obj = StockUnpickler.load(self)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
[1m[31m../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py[0m:143:
in __getattr__
[1m locs = self._importdef.get(name)[0m
E   RecursionError: maximum recursion depth exceeded
!!! Recursion detected (same locals & position)
------------------------------ Captured log call -------------------------------
[32mINFO [0m apache_beam.internal.gcp.auth:auth.py:105 Setting socket
default timeout to 60 seconds.
[32mINFO [0m apache_beam.internal.gcp.auth:auth.py:107 socket default
timeout is 60.0 seconds.
[32mINFO [0m oauth2client.transport:transport.py:157 Attempting refresh to
obtain initial access_token
[33m=============================== warnings summary
===============================[0m
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42:
DeprecationWarning: "@coroutine" decorator is deprecated since Python 3.8, use
"async def" instead
def call(self, fn, *args, **kwargs):
apache_beam/io/filesystems_test.py:54
apache_beam/io/filesystems_test.py:54
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:54:
DeprecationWarning: invalid escape sequence \c
self.assertIsNone(FileSystems.get_scheme('c:\\abc\cdf')) # pylint:
disable=anomalous-backslash-in-string
apache_beam/io/filesystems_test.py:62
apache_beam/io/filesystems_test.py:62
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:62:
DeprecationWarning: invalid escape sequence \d
self.assertTrue(isinstance(FileSystems.get_filesystem('c:\\abc\def'), #
pylint: disable=anomalous-backslash-in-string
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2437:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
temp_location = pcoll.pipeline.options.view_as(
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2439:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
job_name = pcoll.pipeline.options.view_as(GoogleCloudOptions).job_name
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2463:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
pipeline_options=pcoll.pipeline.options,
apache_beam/dataframe/io.py:629
apache_beam/dataframe/io.py:629
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/dataframe/io.py>:629:
FutureWarning: WriteToFiles is experimental.
return pcoll | fileio.WriteToFiles(
apache_beam/io/fileio.py:550
apache_beam/io/fileio.py:550
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/fileio.py>:550:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
p.options.view_as(GoogleCloudOptions).temp_location or
apache_beam/io/gcp/tests/utils.py:63
apache_beam/io/gcp/tests/utils.py:63
apache_beam/io/gcp/tests/utils.py:63
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py>:63:
PendingDeprecationWarning: Client.dataset is deprecated and will be removed in
a future version. Use a string like 'my_project.my_dataset' or a
cloud.google.bigquery.DatasetReference object, instead.
dataset_ref = client.dataset(unique_dataset_name, project=project)
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2138:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
is_streaming_pipeline = p.options.view_as(StandardOptions).streaming
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2144:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
experiments = p.options.view_as(DebugOptions).experiments or []
apache_beam/io/gcp/bigquery_file_loads.py:1128
apache_beam/io/gcp/bigquery_file_loads.py:1128
apache_beam/io/gcp/bigquery_file_loads.py:1128
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:1128:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
temp_location = p.options.view_as(GoogleCloudOptions).temp_location
apache_beam/io/gcp/bigquery_file_loads.py:1130
apache_beam/io/gcp/bigquery_file_loads.py:1130
apache_beam/io/gcp/bigquery_file_loads.py:1130
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:1130:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
p.options.view_as(GoogleCloudOptions).job_name or 'AUTOMATIC_JOB_NAME')
apache_beam/io/gcp/bigquery.py:2134
apache_beam/io/gcp/bigquery.py:2134
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2134:
BeamDeprecationWarning: options is deprecated since First stable release.
References to <pipeline>.options will not be supported
self.table_reference.projectId = pcoll.pipeline.options.view_as(
apache_beam/io/gcp/tests/utils.py:100
apache_beam/io/gcp/tests/utils.py:100
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py>:100:
PendingDeprecationWarning: Client.dataset is deprecated and will be removed in
a future version. Use a string like 'my_project.my_dataset' or a
cloud.google.bigquery.DatasetReference object, instead.
table_ref = client.dataset(dataset_id).table(table_id)
apache_beam/examples/dataframe/flight_delays.py:47
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/examples/dataframe/flight_delays.py>:47:
FutureWarning: Dropping of nuisance columns in DataFrame reductions (with
'numeric_only=None') is deprecated; in a future version this will raise
TypeError. Select only valid columns before calling the reduction.
return airline_df[at_top_airports].mean()
-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file:
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/pytest_postCommitIT-df-py38.xml>
-
======== 7 failed, 17 passed, 1 skipped, 59 warnings in 1411.25 seconds ========
> Task :sdks:python:test-suites:dataflow:py38:examples FAILED
FAILURE: Build failed with an exception.
* Where:
Script
'<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/test-suites/dataflow/common.gradle'>
line: 165
* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:examples'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
* Get more help at https://help.gradle.org
BUILD FAILED in 26m 16s
15 actionable tasks: 9 executed, 4 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/bpdbo47clutdc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]