See <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/26/display/redirect>

Changes:


------------------------------------------
[...truncated 55.47 KB...]
      NUM_GROUPS = 3
    
      extra_opts = {
          'output': self.output,
          'num_groups': str(NUM_GROUPS),
          'on_success_matcher': all_of(state_verifier)
      }
    
      # Register clean up before pipeline execution
      self.addCleanup(delete_files, [self.output + '*'])
    
      # Get pipeline options from command argument: --test-pipeline-options,
      # and start pipeline job by calling pipeline main function.
>     bigquery_side_input.run(
          self.test_pipeline.get_full_options_as_args(**extra_opts))

apache_beam/examples/cookbook/bigquery_side_input_it_test.py:62: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/cookbook/bigquery_side_input.py:115: in run
    pcoll_groups | WriteToText(known_args.output)
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:546: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:570: in run
    pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))
apache_beam/internal/pickler.py:61: in dump_session
    return desired_pickle_lib.dump_session(file_path)
apache_beam/internal/dill_pickler.py:307: in dump_session
    dill.load_session(file_path)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py:368: in load_session
    module = unpickler.load()
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py:472: in load
    obj = StockUnpickler.load(self)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
E   RecursionError: maximum recursion depth exceeded
!!! Recursion detected (same locals & position)
------------------------------ Captured log call -------------------------------
INFO     apache_beam.internal.gcp.auth:auth.py:105 Setting socket default timeout to 60 seconds.
INFO     apache_beam.internal.gcp.auth:auth.py:107 socket default timeout is 60.0 seconds.
INFO     apache_beam.io.gcp.gcsio:gcsio.py:559 Starting the size estimation of the input
INFO     oauth2client.transport:transport.py:157 Attempting refresh to obtain initial access_token
INFO     apache_beam.io.gcp.gcsio:gcsio.py:572 Finished listing 0 files in 0.06420087814331055 seconds.
___ HourlyTeamScoreIT.test_hourly_team_score_output_checksum_on_small_input ____
[gw2] linux -- Python 3.8.10 <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/bin/python3.8>

self = <apache_beam.examples.complete.game.hourly_team_score_it_test.HourlyTeamScoreIT testMethod=test_hourly_team_score_output_checksum_on_small_input>

    @pytest.mark.examples_postcommit
    def test_hourly_team_score_output_checksum_on_small_input(self):
      # Small dataset to prevent Out of Memory when running in local runners
      INPUT_FILE = 'gs://apache-beam-samples/game/small/gaming_data.csv'
      EXPECTED_CHECKSUM = '91143e81622aa391eb62eaa3f3a5123401edb07d'
      state_verifier = PipelineStateMatcher(PipelineState.DONE)
      query = (
          'SELECT COUNT(*) FROM `%s.%s.%s`' %
          (self.project, self.dataset_ref.dataset_id, self.OUTPUT_TABLE))
    
      bigquery_verifier = BigqueryMatcher(self.project, query, EXPECTED_CHECKSUM)
    
      extra_opts = {
          'input': INPUT_FILE,
          'dataset': self.dataset_ref.dataset_id,
          'window_duration': 1,
          'on_success_matcher': all_of(state_verifier, bigquery_verifier)
      }
    
      # Register clean up before pipeline execution
      # Note that actual execution happens in reverse order.
      self.addCleanup(utils.delete_bq_dataset, self.project, self.dataset_ref)
    
      # Get pipeline options from command argument: --test-pipeline-options,
      # and start pipeline job by calling pipeline main function.
>     hourly_team_score.run(
          self.test_pipeline.get_full_options_as_args(**extra_opts))

apache_beam/examples/complete/game/hourly_team_score_it_test.py:118: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/examples/complete/game/hourly_team_score.py:299: in run
    (  # pylint: disable=expression-not-assigned
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/pipeline.py:546: in run
    return Pipeline.from_runner_api(
apache_beam/pipeline.py:570: in run
    pickler.dump_session(os.path.join(tmpdir, 'main_session.pickle'))
apache_beam/internal/pickler.py:61: in dump_session
    return desired_pickle_lib.dump_session(file_path)
apache_beam/internal/dill_pickler.py:307: in dump_session
    dill.load_session(file_path)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py:368: in load_session
    module = unpickler.load()
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/dill/_dill.py:472: in load
    obj = StockUnpickler.load(self)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
../../build/gradleenv/-1734967051/lib/python3.8/site-packages/execnet/gateway_base.py:143: in __getattr__
    locs = self._importdef.get(name)
E   RecursionError: maximum recursion depth exceeded
!!! Recursion detected (same locals & position)
------------------------------ Captured log call -------------------------------
INFO     apache_beam.internal.gcp.auth:auth.py:105 Setting socket default timeout to 60 seconds.
INFO     apache_beam.internal.gcp.auth:auth.py:107 socket default timeout is 60.0 seconds.
INFO     oauth2client.transport:transport.py:157 Attempting refresh to obtain initial access_token
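
Editorial note: both failures above die in the same place. Beam's
save_main_session support snapshots the __main__ namespace with dill
(pickler.dump_session -> dill_pickler.dump_session, which the traceback shows
immediately exercising dill.load_session), and the load recurses inside
execnet's gateway_base.py:143. The pattern at that line,
locs = self._importdef.get(name) inside __getattr__, has no base case when
_importdef is missing from the instance: looking it up fails, which re-enters
__getattr__, forever. A minimal, hypothetical sketch of that failure mode
(the Proxy class below is a stand-in, not execnet's real code):

    class Proxy:
        """Hypothetical stand-in for the object behind gateway_base.py:143."""

        def __getattr__(self, name):
            # __getattr__ runs only when normal lookup fails. If
            # '_importdef' is itself absent (e.g. the object was rebuilt
            # by a pickler without running __init__), this line re-enters
            # __getattr__('_importdef') with no base case.
            return self._importdef.get(name)

    try:
        Proxy().anything
    except RecursionError as err:
        print(err)  # maximum recursion depth exceeded
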
=============================== warnings summary ===============================
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/build/gradleenv/-1734967051/lib/python3.8/site-packages/tenacity/_asyncio.py>:42: DeprecationWarning: "@coroutine" decorator is deprecated since Python 3.8, use "async def" instead
    def call(self, fn, *args, **kwargs):
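
Editorial note: a minimal sketch of the migration this DeprecationWarning
asks for; 'fetch' is a hypothetical name, not tenacity's actual code.

    import asyncio

    # Deprecated generator-based coroutine (the form @coroutine decorated):
    #     @asyncio.coroutine
    #     def fetch():
    #         yield from asyncio.sleep(1)

    # Native-coroutine replacement:
    async def fetch():
        await asyncio.sleep(1)

    asyncio.run(fetch())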

apache_beam/io/filesystems_test.py:54
apache_beam/io/filesystems_test.py:54
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:54: DeprecationWarning: invalid escape sequence \c
    self.assertIsNone(FileSystems.get_scheme('c:\\abc\cdf'))  # pylint: disable=anomalous-backslash-in-string

apache_beam/io/filesystems_test.py:62
apache_beam/io/filesystems_test.py:62
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:62: DeprecationWarning: invalid escape sequence \d
    self.assertTrue(isinstance(FileSystems.get_filesystem('c:\\abc\def'),  # pylint: disable=anomalous-backslash-in-string
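
Editorial note: the two warnings above come from Windows-style paths written
as plain string literals; \c and \d are not valid escape sequences and are
slated to become syntax errors. A raw string (or doubled backslashes) is the
usual fix; a sketch, assuming the behaviour the test itself asserts:

    from apache_beam.io.filesystems import FileSystems

    # r'...' keeps every backslash literal, so no invalid-escape warning.
    assert FileSystems.get_scheme(r'c:\abc\cdf') is None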

apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
apache_beam/io/gcp/bigquery.py:2437
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2437: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    temp_location = pcoll.pipeline.options.view_as(
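
Editorial note: this warning, and its many repeats below, come from
Beam-internal code reading options back off a pipeline via <pipeline>.options.
In user code, the non-deprecated pattern is to keep hold of the
PipelineOptions object you constructed; a hedged sketch (the bucket name is
hypothetical):

    import apache_beam as beam
    from apache_beam.options.pipeline_options import (
        GoogleCloudOptions, PipelineOptions)

    options = PipelineOptions(temp_location='gs://my-bucket/tmp')

    # Deprecated: pcoll.pipeline.options.view_as(GoogleCloudOptions)
    # Preferred: view the options object you already have.
    temp_location = options.view_as(GoogleCloudOptions).temp_location

    with beam.Pipeline(options=options) as p:
        _ = p | beam.Create([1, 2, 3]) | beam.Map(print)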

apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
apache_beam/io/gcp/bigquery.py:2439
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2439: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    job_name = pcoll.pipeline.options.view_as(GoogleCloudOptions).job_name

apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
apache_beam/io/gcp/bigquery.py:2463
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2463: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    pipeline_options=pcoll.pipeline.options,

apache_beam/dataframe/io.py:629
apache_beam/dataframe/io.py:629
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/dataframe/io.py>:629: FutureWarning: WriteToFiles is experimental.
    return pcoll | fileio.WriteToFiles(

apache_beam/io/fileio.py:550
apache_beam/io/fileio.py:550
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/fileio.py>:550: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    p.options.view_as(GoogleCloudOptions).temp_location or

apache_beam/io/gcp/tests/utils.py:63
apache_beam/io/gcp/tests/utils.py:63
apache_beam/io/gcp/tests/utils.py:63
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py>:63: PendingDeprecationWarning: Client.dataset is deprecated and will be removed in a future version. Use a string like 'my_project.my_dataset' or a cloud.google.bigquery.DatasetReference object, instead.
    dataset_ref = client.dataset(unique_dataset_name, project=project)
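
Editorial note: a sketch of the replacement this PendingDeprecationWarning
suggests (project, dataset, and table names are hypothetical); the same
applies to the utils.py:100 warning further down.

    from google.cloud import bigquery

    # Deprecated: client.dataset('my_dataset', project='my-project')
    # Preferred: an explicit DatasetReference, or a 'project.dataset' string.
    dataset_ref = bigquery.DatasetReference('my-project', 'my_dataset')
    table_ref = dataset_ref.table('my_table')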

apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
apache_beam/io/gcp/bigquery.py:2138
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2138: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    is_streaming_pipeline = p.options.view_as(StandardOptions).streaming

apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
apache_beam/io/gcp/bigquery.py:2144
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2144: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    experiments = p.options.view_as(DebugOptions).experiments or []

apache_beam/io/gcp/bigquery_file_loads.py:1128
apache_beam/io/gcp/bigquery_file_loads.py:1128
apache_beam/io/gcp/bigquery_file_loads.py:1128
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:1128: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    temp_location = p.options.view_as(GoogleCloudOptions).temp_location

apache_beam/io/gcp/bigquery_file_loads.py:1130
apache_beam/io/gcp/bigquery_file_loads.py:1130
apache_beam/io/gcp/bigquery_file_loads.py:1130
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery_file_loads.py>:1130: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    p.options.view_as(GoogleCloudOptions).job_name or 'AUTOMATIC_JOB_NAME')

apache_beam/io/gcp/bigquery.py:2134
apache_beam/io/gcp/bigquery.py:2134
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/bigquery.py>:2134: BeamDeprecationWarning: options is deprecated since First stable release. References to <pipeline>.options will not be supported
    self.table_reference.projectId = pcoll.pipeline.options.view_as(

apache_beam/io/gcp/tests/utils.py:100
apache_beam/io/gcp/tests/utils.py:100
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/io/gcp/tests/utils.py>:100: PendingDeprecationWarning: Client.dataset is deprecated and will be removed in a future version. Use a string like 'my_project.my_dataset' or a cloud.google.bigquery.DatasetReference object, instead.
    table_ref = client.dataset(dataset_id).table(table_id)

apache_beam/examples/dataframe/flight_delays.py:47
  <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/apache_beam/examples/dataframe/flight_delays.py>:47: FutureWarning: Dropping of nuisance columns in DataFrame reductions (with 'numeric_only=None') is deprecated; in a future version this will raise TypeError.  Select only valid columns before calling the reduction.
    return airline_df[at_top_airports].mean()
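
Editorial note: a hedged sketch of the pandas change this FutureWarning
points at; the column names are hypothetical, not flight_delays.py's schema.

    import pandas as pd

    df = pd.DataFrame({'airline': ['AA', 'UA'], 'arrival_delay': [5.0, 7.0]})

    # Relying on .mean() to silently drop the non-numeric 'airline' column
    # is what is deprecated; select the numeric columns explicitly instead.
    print(df.select_dtypes('number').mean())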

-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/pytest_postCommitIT-df-py38.xml> -
======== 7 failed, 17 passed, 1 skipped, 59 warnings in 1411.25 seconds ========

> Task :sdks:python:test-suites:dataflow:py38:examples FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python_Examples_Dataflow/ws/src/sdks/python/test-suites/dataflow/common.gradle>' line: 165

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:examples'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.

* Get more help at https://help.gradle.org

BUILD FAILED in 26m 16s
15 actionable tasks: 9 executed, 4 from cache, 2 up-to-date

Publishing build scan...
https://gradle.com/s/bpdbo47clutdc

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
