See <https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/3327/display/redirect>
Changes:
------------------------------------------
[...truncated 2.77 MB...]
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.$closeResource(DefaultJobBundleFactory.java:635)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.close(DefaultJobBundleFactory.java:635)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.unref(DefaultJobBundleFactory.java:651)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory$WrappedSdkHarnessClient.access$400(DefaultJobBundleFactory.java:581)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory.lambda$createEnvironmentCaches$3(DefaultJobBundleFactory.java:213)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.processPendingNotifications(LocalCache.java:1809)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.runUnlockedCleanup(LocalCache.java:3462)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.postWriteCleanup(LocalCache.java:3438)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.clear(LocalCache.java:3215)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.clear(LocalCache.java:4270)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalManualCache.invalidateAll(LocalCache.java:4909)
at org.apache.beam.runners.fnexecution.control.DefaultJobBundleFactory.close(DefaultJobBundleFactory.java:320)
at org.apache.beam.runners.fnexecution.control.DefaultExecutableStageContext.close(DefaultExecutableStageContext.java:43)
at org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory$WrappedContext.closeActual(ReferenceCountingExecutableStageContextFactory.java:208)
at org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory$WrappedContext.access$200(ReferenceCountingExecutableStageContextFactory.java:184)
at org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory.release(ReferenceCountingExecutableStageContextFactory.java:173)
at org.apache.beam.runners.fnexecution.control.ReferenceCountingExecutableStageContextFactory.lambda$scheduleRelease$1(ReferenceCountingExecutableStageContextFactory.java:127)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannel$AnnotatedConnectException: Connection refused: localhost/127.0.0.1:39487
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:714)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.socket.nio.NioSocketChannel.doFinishConnect(NioSocketChannel.java:327)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:334)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:688)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:635)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:552)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:514)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1044)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
at org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
INFO:apache_beam.runners.worker.sdk_worker:Done consuming work.
INFO:apache_beam.runners.portability.fn_api_runner.translations:==================== <function lift_combiners at 0x7fa4d01e6758> ====================
20/06/26 06:28:33 INFO org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Staging artifacts for {"sessionId":"job_f9e4bad9-e5ca-4352-afda-611987f346c0","basePath":"/tmp/sparktestmBe43E"}.
20/06/26 06:28:33 INFO org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Resolving artifacts for {"sessionId":"job_f9e4bad9-e5ca-4352-afda-611987f346c0","basePath":"/tmp/sparktestmBe43E"}.ref_Environment_default_environment_1.
20/06/26 06:28:33 INFO org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Getting 0 artifacts for {"sessionId":"job_f9e4bad9-e5ca-4352-afda-611987f346c0","basePath":"/tmp/sparktestmBe43E"}.null.
20/06/26 06:28:33 INFO org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Artifacts fully staged for {"sessionId":"job_f9e4bad9-e5ca-4352-afda-611987f346c0","basePath":"/tmp/sparktestmBe43E"}.
20/06/26 06:28:33 INFO org.apache.beam.runners.spark.SparkJobInvoker: Invoking job test_windowing_1593152909.21_ad850e69-e6cc-44c8-b6f1-4629993de887
20/06/26 06:28:33 INFO org.apache.beam.runners.jobsubmission.JobInvocation: Starting job invocation test_windowing_1593152909.21_ad850e69-e6cc-44c8-b6f1-4629993de887
INFO:apache_beam.runners.portability.portable_runner:Environment "LOOPBACK" has started a component necessary for the execution. Be sure to run the pipeline using
  with Pipeline() as p:
    p.apply(..)
This ensures that the pipeline finishes before this program exits.
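For context, the warning above refers to the context-manager form of pipeline construction. A minimal sketch is shown below; the runner, job endpoint, and transforms are illustrative assumptions, not this job's actual configuration.

    # Minimal sketch; the options and transforms here are assumed for illustration.
    import apache_beam as beam
    from apache_beam.options.pipeline_options import PipelineOptions

    options = PipelineOptions([
        "--runner=PortableRunner",
        "--job_endpoint=localhost:8099",
        "--environment_type=LOOPBACK",
    ])

    # Exiting the with-block implicitly calls run() and waits for the result,
    # so LOOPBACK components started in this process stay alive until the job
    # finishes instead of being torn down when the script exits.
    with beam.Pipeline(options=options) as p:
        p | beam.Create(["a", "b"]) | beam.Map(lambda x: x)

Built this way, the program blocks on pipeline completion rather than exiting while the LOOPBACK worker is still needed.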
INFO:apache_beam.runners.portability.portable_runner:Job state changed to STOPPED
INFO:apache_beam.runners.portability.portable_runner:Job state changed to STARTING
INFO:apache_beam.runners.portability.portable_runner:Job state changed to RUNNING
20/06/26 06:28:33 INFO org.apache.beam.runners.spark.SparkPipelineRunner: PipelineOptions.filesToStage was not specified. Defaulting to files from the classpath
20/06/26 06:28:33 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Will stage 7 files. (Enable logging at DEBUG level to see which files will be staged.)
20/06/26 06:28:33 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Running job test_windowing_1593152909.21_ad850e69-e6cc-44c8-b6f1-4629993de887 on Spark master local
20/06/26 06:28:34 WARN org.apache.beam.runners.spark.translation.GroupNonMergingWindowsFunctions: Either coder LengthPrefixCoder(ByteArrayCoder) or GlobalWindow$Coder is not consistent with equals. That might cause issues on some runners.
20/06/26 06:28:34 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job test_windowing_1593152909.21_ad850e69-e6cc-44c8-b6f1-4629993de887: Pipeline translated successfully. Computing outputs
INFO:apache_beam.runners.worker.statecache:Creating state cache with size 0
INFO:apache_beam.runners.worker.sdk_worker:Creating insecure control channel for localhost:42193.
INFO:apache_beam.runners.worker.sdk_worker:Control channel established.
INFO:apache_beam.runners.worker.sdk_worker:Initializing SDKHarness with unbounded number of workers.
20/06/26 06:28:34 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: Beam Fn Control client connected with id 32-1
20/06/26 06:28:34 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: getProcessBundleDescriptor request with id 32-2
INFO:apache_beam.runners.worker.sdk_worker:Creating insecure state channel for localhost:37239.
INFO:apache_beam.runners.worker.sdk_worker:State channel established.
INFO:apache_beam.runners.worker.data_plane:Creating client data channel for localhost:41067
20/06/26 06:28:34 INFO org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client connected.
20/06/26 06:28:34 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: getProcessBundleDescriptor request with id 32-3
20/06/26 06:28:35 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: getProcessBundleDescriptor request with id 32-4
20/06/26 06:28:35 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: getProcessBundleDescriptor request with id 32-5
20/06/26 06:28:35 INFO org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: getProcessBundleDescriptor request with id 32-6
20/06/26 06:28:36 INFO org.apache.beam.runners.spark.SparkPipelineRunner: Job test_windowing_1593152909.21_ad850e69-e6cc-44c8-b6f1-4629993de887 finished.
INFO:apache_beam.runners.portability.portable_runner:Job state changed to DONE
.
======================================================================
ERROR: test_assert_that (__main__.SparkRunnerTest)
----------------------------------------------------------------------
Traceback (most recent call last):
File "apache_beam/runners/portability/fn_api_runner/fn_runner_test.py", line
113, in test_assert_that
assert_that(p | beam.Create(['a', 'b']), equal_to(['a']))
File "apache_beam/pipeline.py", line 547, in __exit__
self.run().wait_until_finish()
File "apache_beam/runners/portability/portable_runner.py", line 543, in
wait_until_finish
self._observe_state(message_thread)
File "apache_beam/runners/portability/portable_runner.py", line 552, in
_observe_state
for state_response in self._state_stream:
File
"<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py",>
line 413, in next
return self._next()
==================== Timed out after 60 seconds. ====================
# Thread: <Thread(wait_until_finish_read, started daemon 140344893105920)>
# Thread: <Thread(Thread-7, started daemon 140345248126720)>
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py>", line 697, in _next
# Thread: <Thread(Thread-1, started daemon 140345750435584)>
# Thread: <_MainThread(MainThread, started 140346040198912)>
_common.wait(self._state.condition.wait, _response_ready)
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_common.py>", line 138, in wait
_wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_common.py>", line 103, in _wait_once
wait_fn(timeout=timeout)
File "/usr/lib/python2.7/threading.py", line 359, in wait
_sleep(delay)
File "apache_beam/runners/portability/portable_runner_test.py", line 82, in handler
raise BaseException(msg)
BaseException: Timed out after 60 seconds.
----------------------------------------------------------------------
Ran 41 tests in 381.394s
FAILED (errors=1, skipped=10)
Exception in thread read_grpc_client_inputs (most likely raised during interpreter shutdown):Exception in thread Thread-541 (most likely raised during interpreter shutdown):
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "apache_beam/runners/worker/data_plane.py", line 184, in run
File "apache_beam/runners/worker/sdk_worker.py", line 431, in
shutdown_inactive_bundle_processors
<type 'exceptions.AttributeError'>: 'NoneType' object has no attribute 'time'
Exception in thread run_worker_31-1 (most likely raised during interpreter shutdown):
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "/usr/lib/python2.7/threading.py", line 754, in run
File "apache_beam/runners/worker/sdk_worker.py", line 215, in run
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py>", line 413, in next
Exception in thread run_worker_32-1 (most likely raised during interpreter shutdown):
Traceback (most recent call last):Exception in thread read_state (most likely raised during interpreter shutdown):
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "/usr/lib/python2.7/threading.py", line 754, in run
File "apache_beam/runners/worker/sdk_worker.py", line 796, in pull_responses
<type 'exceptions.AttributeError'>: 'NoneType' object has no attribute
'exc_info'
Exception in thread Thread-524 (most likely raised during interpreter shutdown): File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py>", line 706, in _nextException in thread read_grpc_client_inputs (most likely raised during interpreter shutdown):
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "apache_beam/runners/worker/data_plane.py", line 184, in run
File "/usr/lib/python2.7/threading.py", line 289, in __exit__ File
"/usr/lib/python2.7/threading.py", line 801, in __bootstrap_innerException in
thread read_state (most likely raised during interpreter shutdown): File
"apache_beam/runners/worker/sdk_worker.py", line 431, in
shutdown_inactive_bundle_processors
<type 'exceptions.AttributeError'>: 'NoneType' object has no attribute 'time'
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
Traceback (most recent call last): File "/usr/lib/python2.7/threading.py", line 754, in run File "/usr/lib/python2.7/threading.py", line 754, in run
Traceback (most recent call last):
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "apache_beam/runners/worker/data_plane.py", line 545, in <lambda>
File "apache_beam/runners/worker/data_plane.py", line 540, in _read_inputs
File "/usr/lib/python2.7/threading.py", line 585, in set
File "/usr/lib/python2.7/threading.py", line 407, in notifyAll
<type 'exceptions.TypeError'>: 'NoneType' object is not callable
File "/usr/lib/python2.7/threading.py", line 801, in __bootstrap_inner
File "/usr/lib/python2.7/threading.py", line 754, in run
File "apache_beam/runners/worker/data_plane.py", line 545, in <lambda>
File "apache_beam/runners/worker/data_plane.py", line 540, in _read_inputs
File "/usr/lib/python2.7/threading.py", line 585, in set
File "/usr/lib/python2.7/threading.py", line 407, in notifyAll
<type 'exceptions.TypeError'>: 'NoneType' object is not callable
File "/usr/lib/python2.7/threading.py", line 754, in run
File "apache_beam/runners/worker/sdk_worker.py", line 796, in pull_responses
<type 'exceptions.AttributeError'>: 'NoneType' object has no attribute
'exc_info'
File "apache_beam/runners/worker/sdk_worker.py", line 215, in run
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py>", line 413, in next
File "<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/build/gradleenv/1866363813/local/lib/python2.7/site-packages/grpc/_channel.py>", line 706, in _next
File "/usr/lib/python2.7/threading.py", line 289, in __exit__
File "/usr/lib/python2.7/threading.py", line 216, in __exit__
File "/usr/lib/python2.7/threading.py", line 203, in release
<type 'exceptions.TypeError'>: 'NoneType' object is not callable
File "/usr/lib/python2.7/threading.py", line 216, in __exit__
File "/usr/lib/python2.7/threading.py", line 203, in release
<type 'exceptions.TypeError'>: 'NoneType' object is not callable
> Task
> :sdks:python:test-suites:portable:py2:sparkCompatibilityMatrixBatchLOOPBACK
> FAILED
FAILURE: Build completed with 4 failures.
1: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 181
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py37:sparkCompatibilityMatrixBatchLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 181
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py36:sparkCompatibilityMatrixBatchLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
3: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 181
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py35:sparkCompatibilityMatrixBatchLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
4: Task failed with an exception.
-----------
* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python_VR_Spark/ws/src/sdks/python/test-suites/portable/common.gradle>' line: 181
* What went wrong:
Execution failed for task ':sdks:python:test-suites:portable:py2:sparkCompatibilityMatrixBatchLOOPBACK'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See
https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 27m 51s
73 actionable tasks: 58 executed, 15 from cache
Publishing build scan...
https://gradle.com/s/qv76nt5srlalk
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]