See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/2706/display/redirect?page=changes>

Changes:

[emilyye] sync nltk, orjson for Python image

[noreply] Allow `google-auth < 3`

[samuelw] [BEAM-12776] Change closing to happen in background in parallel for

[noreply] Fix typo in BigQuery documentation

[Steve Niemitz] [BEAM-12767] Improve PipelineOption parsing UX


------------------------------------------
[...truncated 176.32 KB...]
self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_tagged_join>

    def test_tagged_join(self):
      with TestPipeline() as p:
        enrich = (
            p | "Create enrich" >> beam.Create(
                [Enrich(1, "a"), Enrich(2, "b"), Enrich(26, "z")]))
        simple = (
            p | "Create simple" >> beam.Create([
                SimpleRow(1, "foo", 3.14),
                SimpleRow(26, "bar", 1.11),
                SimpleRow(1, "baz", 2.34)
            ]))
        out = ({
            'simple': simple, 'enrich': enrich
        }
               | SqlTransform(
                   """
                SELECT
                  simple.`id` AS `id`,
                  enrich.metadata AS metadata
                FROM simple
                JOIN enrich
                ON simple.`id` = enrich.`id`"""))
>       assert_that(out, equal_to([(1, "a"), (26, "z"), (1, "a")]))

apache_beam/transforms/sql_test.py:141: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:586: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:565: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:440: in run_pipeline
    job_service_handle.submit(proto_pipeline)
apache_beam/runners/portability/portable_runner.py:115: in submit
    return self.run(prepare_response.preparation_id)
apache_beam/runners/portability/portable_runner.py:242: in run
    beam_job_api_pb2.RunJobRequest(preparation_id=preparation_id))
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:946: in __call__
    return _end_unary_response_blocking(state, call, False, None)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

state = <grpc._channel._RPCState object at 0x7fd2effd2780>
call = <grpc._cython.cygrpc.SegregatedCall object at 0x7fd2effd3248>
with_call = False, deadline = None

    def _end_unary_response_blocking(state, call, with_call, deadline):
        if state.code is grpc.StatusCode.OK:
            if with_call:
                rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
                return state.response, rendezvous
            else:
                return state.response
        else:
>           raise _InactiveRpcError(state)
E           grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
E               status = StatusCode.INTERNAL
E               details = ""
E               debug_error_string = "{"created":"@1630456788.798324089","description":"Error received from peer ipv4:127.0.0.1:35375","file":"src/core/lib/surface/call.cc","file_line":1069,"grpc_message":"","grpc_status":13}"
E           >

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:849: _InactiveRpcError
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
__________________ SqlTransformTest.test_windowing_before_sql __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_windowing_before_sql>

    def test_windowing_before_sql(self):
      with TestPipeline() as p:
        out = (
            p | beam.Create([
                SimpleRow(5, "foo", 1.),
                SimpleRow(15, "bar", 2.),
                SimpleRow(25, "baz", 3.)
            ])
            | beam.Map(lambda v: beam.window.TimestampedValue(v, v.id)).
            with_output_types(SimpleRow)
            | beam.WindowInto(
                beam.window.FixedWindows(10)).with_output_types(SimpleRow)
            | SqlTransform("SELECT COUNT(*) as `count` FROM PCOLLECTION"))
>       assert_that(out, equal_to([(1, ), (1, ), (1, )]))

apache_beam/transforms/sql_test.py:175: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:586: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:565: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:440: in run_pipeline
    job_service_handle.submit(proto_pipeline)
apache_beam/runners/portability/portable_runner.py:115: in submit
    return self.run(prepare_response.preparation_id)
apache_beam/runners/portability/portable_runner.py:242: in run
    beam_job_api_pb2.RunJobRequest(preparation_id=preparation_id))
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:946: in __call__
    return _end_unary_response_blocking(state, call, False, None)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

state = <grpc._channel._RPCState object at 0x7fd2efd71e10>
call = <grpc._cython.cygrpc.SegregatedCall object at 0x7fd312b11f88>
with_call = False, deadline = None

    def _end_unary_response_blocking(state, call, with_call, deadline):
        if state.code is grpc.StatusCode.OK:
            if with_call:
                rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
                return state.response, rendezvous
            else:
                return state.response
        else:
>           raise _InactiveRpcError(state)
E           grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
E               status = StatusCode.INTERNAL
E               details = ""
E               debug_error_string = "{"created":"@1630456798.930197271","description":"Error received from peer ipv4:127.0.0.1:35375","file":"src/core/lib/surface/call.cc","file_line":1069,"grpc_message":"","grpc_status":13}"
E           >

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:849: _InactiveRpcError
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
_________________ SqlTransformTest.test_zetasql_generate_data __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_zetasql_generate_data>

    def test_zetasql_generate_data(self):
      with TestPipeline() as p:
        out = p | SqlTransform(
            """SELECT
              CAST(1 AS INT64) AS `int`,
              CAST('foo' AS STRING) AS `str`,
              CAST(3.14  AS FLOAT64) AS `flt`""",
            dialect="zetasql")
>       assert_that(out, equal_to([(1, "foo", 3.14)]))

apache_beam/transforms/sql_test.py:160: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:586: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:565: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:440: in run_pipeline
    job_service_handle.submit(proto_pipeline)
apache_beam/runners/portability/portable_runner.py:115: in submit
    return self.run(prepare_response.preparation_id)
apache_beam/runners/portability/portable_runner.py:242: in run
    beam_job_api_pb2.RunJobRequest(preparation_id=preparation_id))
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:946: in __call__
    return _end_unary_response_blocking(state, call, False, None)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

state = <grpc._channel._RPCState object at 0x7fd2efe08828>
call = <grpc._cython.cygrpc.SegregatedCall object at 0x7fd2efc39b08>
with_call = False, deadline = None

    def _end_unary_response_blocking(state, call, with_call, deadline):
        if state.code is grpc.StatusCode.OK:
            if with_call:
                rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
                return state.response, rendezvous
            else:
                return state.response
        else:
>           raise _InactiveRpcError(state)
E           grpc._channel._InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
E               status = StatusCode.INTERNAL
E               details = ""
E               debug_error_string = "{"created":"@1630456810.166746690","description":"Error received from peer ipv4:127.0.0.1:35375","file":"src/core/lib/surface/call.cc","file_line":1069,"grpc_message":"","grpc_status":13}"
E           >

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:849: _InactiveRpcError
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangSqlValidateRunner.xml> -
================= 9 failed, 4983 deselected in 111.66 seconds ==================

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql FAILED

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerCleanup
Stopping expansion service pid: 8886.
Stopping expansion service pid: 8889.

> Task :runners:spark:2:job-server:sparkJobServerCleanup
Stopping job server pid: 13836.

FAILURE: Build completed with 3 failures.

1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingPython'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 25m 28s
177 actionable tasks: 126 executed, 47 from cache, 4 up-to-date

Publishing build scan...
https://gradle.com/s/biakf4vlxtkbi

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
