See <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/3464/display/redirect?page=changes>
Changes:

[noreply] [BEAM-14072] [BEAM-13993] [BEAM-10039] Import beam plugins before


------------------------------------------
[...truncated 846.14 KB...]
=============================== warnings summary ===============================
apache_beam/io/filesystems_test.py:54
  <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:54: DeprecationWarning: invalid escape sequence \c
    self.assertIsNone(FileSystems.get_scheme('c:\\abc\cdf'))  # pylint: disable=anomalous-backslash-in-string

apache_beam/io/filesystems_test.py:62
  <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/apache_beam/io/filesystems_test.py>:62: DeprecationWarning: invalid escape sequence \d
    self.assertTrue(isinstance(FileSystems.get_filesystem('c:\\abc\def'),  # pylint: disable=anomalous-backslash-in-string

-- Docs: https://docs.pytest.org/en/latest/warnings.html
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangValidateRunner.xml> -
=========== 10 passed, 5241 deselected, 2 warnings in 115.33 seconds ===========

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingPython
>>> RUNNING integration tests with pipeline options: --runner=PortableRunner
>>>   --job_endpoint=localhost:41721 --environment_cache_millis=10000
>>>   --experiments=beam_fn_api
>>>   pytest options:
>>>   collect markers: -m=xlang_transforms
============================= test session starts ==============================
platform linux -- Python 3.6.15, pytest-4.6.11, py-1.11.0, pluggy-0.13.1
rootdir: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python,> inifile: pytest.ini
plugins: xdist-1.34.0, timeout-1.4.2, forked-1.4.0, requests-mock-1.9.3
timeout: 600.0s
timeout method: signal
timeout func_only: False
collected 5251 items / 5241 deselected / 10 selected

apache_beam/io/external/generate_sequence_test.py .ss                    [ 30%]
apache_beam/transforms/validate_runner_xlang_test.py .......             [100%]

- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangValidateRunner.xml> -
============ 8 passed, 2 skipped, 5241 deselected in 71.07 seconds =============

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql
>>> RUNNING integration tests with pipeline options: --runner=PortableRunner
>>>   --job_endpoint=localhost:41721 --environment_cache_millis=10000
>>>   --experiments=beam_fn_api
>>>   pytest options:
>>>   collect markers: -m=xlang_sql_expansion_service
============================= test session starts ==============================
platform linux -- Python 3.6.15, pytest-4.6.11, py-1.11.0, pluggy-0.13.1
rootdir: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python,> inifile: pytest.ini
plugins: xdist-1.34.0, timeout-1.4.2, forked-1.4.0, requests-mock-1.9.3
timeout: 600.0s
timeout method: signal
timeout func_only: False
collected 5251 items / 5242 deselected / 9 selected

apache_beam/transforms/sql_test.py .......FF                             [100%]

=================================== FAILURES ===================================
__________________ SqlTransformTest.test_windowing_before_sql __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_windowing_before_sql>

    def test_windowing_before_sql(self):
      with TestPipeline() as p:
        out = (
            p
            | beam.Create([
                SimpleRow(5, "foo", 1.),
                SimpleRow(15, "bar", 2.),
                SimpleRow(25, "baz", 3.)
            ])
            | beam.Map(lambda v: beam.window.TimestampedValue(v, v.id)).
            with_output_types(SimpleRow)
            | beam.WindowInto(
                beam.window.FixedWindows(10)).with_output_types(SimpleRow)
            | SqlTransform("SELECT COUNT(*) as `count` FROM PCOLLECTION"))
>       assert_that(out, equal_to([(1, ), (1, ), (1, )]))

apache_beam/transforms/sql_test.py:175: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:116: in run
    state = result.wait_until_finish()
apache_beam/runners/portability/portable_runner.py:600: in wait_until_finish
    raise self._runtime_exception
apache_beam/runners/portability/portable_runner.py:606: in _observe_state
    for state_response in self._state_stream:
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:426: in __next__
    return self._next()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Connection reset b...1","file":"src/core/lib/surface/call.cc","file_line":903,"grpc_message":"Connection reset by peer","grpc_status":14}"
>

    def _next(self):
        with self._state.condition:
            if self._state.code is None:
                event_handler = _event_handler(self._state,
                                               self._response_deserializer)
                self._state.due.add(cygrpc.OperationType.receive_message)
                operating = self._call.operate(
                    (cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
                    event_handler)
                if not operating:
                    self._state.due.remove(cygrpc.OperationType.receive_message)
            elif self._state.code is grpc.StatusCode.OK:
                raise StopIteration()
            else:
                raise self
    
            def _response_ready():
                return (self._state.response is not None or
                        (cygrpc.OperationType.receive_message
                         not in self._state.due and
                         self._state.code is not None))
    
            _common.wait(self._state.condition.wait, _response_ready)
            if self._state.response is not None:
                response = self._state.response
                self._state.response = None
                return response
            elif cygrpc.OperationType.receive_message not in self._state.due:
                if self._state.code is grpc.StatusCode.OK:
                    raise StopIteration()
                elif self._state.code is not None:
>                   raise self
E                   grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
E                   	status = StatusCode.UNAVAILABLE
E                   	details = "Connection reset by peer"
E                   	debug_error_string = "{"created":"@1647132187.915042093","description":"Error received from peer ipv6:[::1]:41721","file":"src/core/lib/surface/call.cc","file_line":903,"grpc_message":"Connection reset by peer","grpc_status":14}"
E                   >

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py:826: _MultiThreadedRendezvous
----------------------------- Captured stderr call -----------------------------
E0313 00:42:48.911252180  735329 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
ERROR:root:org.apache.spark.SparkException: Job 0 cancelled because SparkContext was shut down
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
ERROR    root:portable_runner.py:570 org.apache.spark.SparkException: Job 0 cancelled because SparkContext was shut down
_________________ SqlTransformTest.test_zetasql_generate_data __________________

self = <apache_beam.transforms.sql_test.SqlTransformTest testMethod=test_zetasql_generate_data>

    def test_zetasql_generate_data(self):
      with TestPipeline() as p:
        out = p | SqlTransform(
            """SELECT
              CAST(1 AS INT64) AS `int`,
              CAST('foo' AS STRING) AS `str`,
              CAST(3.14 AS FLOAT64) AS `flt`""",
            dialect="zetasql")
>       assert_that(out, equal_to([(1, "foo", 3.14)]))

apache_beam/transforms/sql_test.py:160: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
apache_beam/pipeline.py:596: in __exit__
    self.result = self.run()
apache_beam/testing/test_pipeline.py:114: in run
    False if self.not_use_test_runner_api else test_runner_api))
apache_beam/pipeline.py:573: in run
    return self.runner.run_pipeline(self, self._options)
apache_beam/runners/portability/portable_runner.py:438: in run_pipeline
    job_service_handle = self.create_job_service(options)
apache_beam/runners/portability/portable_runner.py:317: in create_job_service
    return self.create_job_service_handle(server.start(), options)
apache_beam/runners/portability/job_server.py:54: in start
    grpc.channel_ready_future(channel).result(timeout=self._timeout)
../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:139: in result
    self._block(timeout)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <grpc._utilities._ChannelReadyFuture object at 0x7fd44c1abb38>
timeout = 60

    def _block(self, timeout):
        until = None if timeout is None else time.time() + timeout
        with self._condition:
            while True:
                if self._cancelled:
                    raise grpc.FutureCancelledError()
                elif self._matured:
                    return
                else:
                    if until is None:
                        self._condition.wait()
                    else:
                        remaining = until - time.time()
                        if remaining < 0:
>                           raise grpc.FutureTimeoutError()
E                           grpc.FutureTimeoutError

../../build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_utilities.py:85: FutureTimeoutError
----------------------------- Captured stderr call -----------------------------
E0313 00:43:08.354950507  735329 fork_posix.cc:70]           Fork support is only compatible with the epoll1 and poll polling strategies
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING:root:Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
------------------------------ Captured log call -------------------------------
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
WARNING  root:environments.py:374 Make sure that locally built Python SDK docker image has Python 3.6 interpreter.
- generated xml file: <https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/pytest_xlangSqlValidateRunner.xml> -
============ 2 failed, 7 passed, 5242 deselected in 272.53 seconds =============
Exception in thread wait_until_finish_read:
Traceback (most recent call last):
  File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.6/threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
  File "<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/sdks/python/apache_beam/runners/portability/portable_runner.py",> line 565, in read_messages
    for message in self._message_stream:
  File "<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py",> line 426, in __next__
    return self._next()
  File "<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark/ws/src/build/gradleenv/1922375555/lib/python3.6/site-packages/grpc/_channel.py",> line 826, in _next
    raise self
grpc._channel._MultiThreadedRendezvous: <_MultiThreadedRendezvous of RPC that terminated with:
	status = StatusCode.UNAVAILABLE
	details = "Connection reset by peer"
	debug_error_string = "{"created":"@1647132187.915074588","description":"Error received from peer ipv6:[::1]:41721","file":"src/core/lib/surface/call.cc","file_line":903,"grpc_message":"Connection reset by peer","grpc_status":14}"
>

> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql FAILED
> Task :runners:spark:2:job-server:validatesCrossLanguageRunnerCleanup
> Task :runners:spark:2:job-server:sparkJobServerCleanup

FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':runners:spark:2:job-server:validatesCrossLanguageRunnerPythonUsingSql'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.

* Get more help at https://help.gradle.org

Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.

You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

See https://docs.gradle.org/7.3.2/userguide/command_line_interface.html#sec:command_line_warnings

Execution optimizations have been disabled for 1 invalid unit(s) of work during this build to ensure correctness.
Please consult deprecation warnings for more details.

BUILD FAILED in 39m 42s

250 actionable tasks: 171 executed, 68 from cache, 11 up-to-date

Publishing build scan...
https://gradle.com/s/v5qwep6qqowfg

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
