See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/3304/display/redirect?page=changes>
Changes:
[noreply] Update Python SDK beam-master tags (#16630)
[noreply] Merge pull request #16592 from [BEAM-13722][Playground] Add precompiling
------------------------------------------
[...truncated 620.37 KB...]
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
__________________ SqlTransformTest.test_windowing_before_sql __________________
self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_windowing_before_sql>
    def test_windowing_before_sql(self):
      with TestPipeline() as p:
        out = (
            p | beam.Create([
                SimpleRow(5, "foo", 1.),
                SimpleRow(15, "bar", 2.),
                SimpleRow(25, "baz", 3.)
            ])
            | beam.Map(lambda v: beam.window.TimestampedValue(v, v.id)).
            with_output_types(SimpleRow)
            | beam.WindowInto(
                beam.window.FixedWindows(10)).with_output_types(SimpleRow)
            | SqlTransform("SELECT COUNT(*) as `count` FROM PCOLLECTION"))
>       assert_that(out, equal_to([(1, ), (1, ), (1, )]))
apache_beam/transforms/sql_test.py:183:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <grpc._utilities._ChannelReadyFuture object at 0x7fa46c938438>
timeout = 60
    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E grpc.FutureTimeoutError
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
_________________ SqlTransformTest.test_zetasql_generate_data __________________
self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_zetasql_generate_data>
    def test_zetasql_generate_data(self):
      with TestPipeline() as p:
        out = p | SqlTransform(
            """SELECT
            CAST(1 AS INT64) AS `int`,
            CAST('foo' AS STRING) AS `str`,
            CAST(3.14 AS FLOAT64) AS `flt`""",
            dialect="zetasql")
>       assert_that(out, equal_to([(1, "foo", 3.14)]))
apache_beam/transforms/sql_test.py:168:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <grpc._utilities._ChannelReadyFuture object at 0x7fa46c932c18>
timeout = 60
    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E grpc.FutureTimeoutError
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangSqlValidateRunner.xml> -
================= 9 failed, 5167 deselected in 665.45 seconds ==================
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql FAILED
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerCleanup
Stopping expansion service pid: 10211.
Stopping expansion service pid: 10212.
> Task :runners:spark:2:job-server:sparkJobServerCleanup
Stopping job server pid: 22612.
FAILURE: Build completed with 7 failures.
1: Task failed with an exception.
-----------
* Where:
Build file '<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/go/test/build.gradle>' line: 170
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerGoUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingJava'.
> There were failing tests. See the report at:
> file://<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/runners/spark/2/job-server/build/reports/tests/validatesCrossLanguageRunnerJavaUsingJava/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
3: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingPython'.
> There were failing tests. See the report at:
> file://<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/runners/spark/2/job-server/build/reports/tests/validatesCrossLanguageRunnerJavaUsingPython/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
4: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerJavaUsingPythonOnly'.
> There were failing tests. See the report at:
> file://<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/runners/spark/2/job-server/build/reports/tests/validatesCrossLanguageRunnerJavaUsingPythonOnly/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
5: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
6: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingPython'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
7: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.
See https://docs.gradle.org/7.3.2/userguide/command_line_interface.html#sec:command_line_warnings
Execution optimizations have been disabled for 1 invalid unit(s) of work during this build to ensure correctness.
Please consult deprecation warnings for more details.
BUILD FAILED in 32m 34s
217 actionable tasks: 151 executed, 57 from cache, 9 up-to-date
Publishing build scan...
https://gradle.com/s/cfpmkjj2qyabg
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure