[ 
https://issues.apache.org/jira/browse/BEAM-11484?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Kenneth Knowles updated BEAM-11484:
-----------------------------------
    Status: Open  (was: Triage Needed)

> Spark test failure: 
> org.apache.beam.runners.core.metrics.MetricsPusherTest.pushesUserMetrics
> --------------------------------------------------------------------------------------------
>
>                 Key: BEAM-11484
>                 URL: https://issues.apache.org/jira/browse/BEAM-11484
>             Project: Beam
>          Issue Type: Sub-task
>          Components: runner-spark, test-failures
>            Reporter: Tyson Hamilton
>            Priority: P1
>              Labels: flake, portability-spark
>
> h1. Regression
> {code:java}
> Regression
> org.apache.beam.runners.core.metrics.MetricsPusherTest.pushesUserMetrics
> Failing for the past 1 build (Since #466 ) Took 47 sec.   Error Message
> java.lang.NullPointerException
> Stacktrace
> java.lang.NullPointerException at 
> org.apache.beam.runners.core.metrics.TestMetricsSink.getCounterValue(TestMetricsSink.java:43)
>  at 
> org.apache.beam.runners.core.metrics.MetricsPusherTest.pushesUserMetrics(MetricsPusherTest.java:77)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>  at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>  at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  at 
> org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) 
> at org.apache.beam.sdk.testing.TestPipeline$1.evaluate(TestPipeline.java:322) 
> at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:305) at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>  at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:365) at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>  at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>  at org.junit.runners.ParentRunner$4.run(ParentRunner.java:330) at 
> org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:78) at 
> org.junit.runners.ParentRunner.runChildren(ParentRunner.java:328) at 
> org.junit.runners.ParentRunner.access$100(ParentRunner.java:65) at 
> org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:292) at 
> org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:305) at 
> org.junit.runners.ParentRunner.run(ParentRunner.java:412) at 
> org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.runTestClass(JUnitTestClassExecutor.java:110)
>  at 
> org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:58)
>  at 
> org.gradle.api.internal.tasks.testing.junit.JUnitTestClassExecutor.execute(JUnitTestClassExecutor.java:38)
>  at 
> org.gradle.api.internal.tasks.testing.junit.AbstractJUnitTestClassProcessor.processTestClass(AbstractJUnitTestClassProcessor.java:62)
>  at 
> org.gradle.api.internal.tasks.testing.SuiteTestClassProcessor.processTestClass(SuiteTestClassProcessor.java:51)
>  at sun.reflect.GeneratedMethodAccessor3.invoke(Unknown Source) at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
>  at 
> org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
>  at 
> org.gradle.internal.dispatch.ContextClassLoaderDispatch.dispatch(ContextClassLoaderDispatch.java:33)
>  at 
> org.gradle.internal.dispatch.ProxyDispatchAdapter$DispatchingInvocationHandler.invoke(ProxyDispatchAdapter.java:94)
>  at com.sun.proxy.$Proxy2.processTestClass(Unknown Source) at 
> org.gradle.api.internal.tasks.testing.worker.TestWorker.processTestClass(TestWorker.java:119)
>  at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source) at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498) at 
> org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:36)
>  at 
> org.gradle.internal.dispatch.ReflectionDispatch.dispatch(ReflectionDispatch.java:24)
>  at 
> org.gradle.internal.remote.internal.hub.MessageHubBackedObjectConnection$DispatchWrapper.dispatch(MessageHubBackedObjectConnection.java:182)
>  at 
> org.gradle.internal.remote.internal.hub.MessageHubBackedObjectConnection$DispatchWrapper.dispatch(MessageHubBackedObjectConnection.java:164)
>  at 
> org.gradle.internal.remote.internal.hub.MessageHub$Handler.run(MessageHub.java:414)
>  at 
> org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:64)
>  at 
> org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:48)
>  at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>  at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>  at 
> org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:56)
>  at java.lang.Thread.run(Thread.java:748)
> Standard Output
> Shutting SDK harness down.
> Standard Error
> 20/12/17 00:25:04 WARN org.apache.beam.sdk.coders.SerializableCoder: Can't 
> verify serialized elements of type Shard have well defined equals method. 
> This may produce incorrect results on some PipelineRunner 20/12/17 00:25:04 
> INFO org.apache.beam.runners.jobsubmission.JobServerDriver: 
> ArtifactStagingService started on localhost:34495 20/12/17 00:25:04 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: Java ExpansionService 
> started on localhost:41423 20/12/17 00:25:04 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: JobService started on 
> localhost:45455 20/12/17 00:25:08 INFO 
> org.apache.beam.runners.portability.PortableRunner: Using job server 
> endpoint: localhost:45455 20/12/17 00:25:08 INFO 
> org.apache.beam.runners.portability.PortableRunner: PrepareJobResponse: 
> preparation_id: 
> "metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912"
>  artifact_staging_endpoint { url: "localhost:34495" } staging_session_token: 
> "metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912"
>  20/12/17 00:25:08 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Staging 
> artifacts for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912.
>  20/12/17 00:25:08 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: 
> Resolving artifacts for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912.EMBEDDED.
>  20/12/17 00:25:08 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Getting 
> 313 artifacts for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912.null.
>  20/12/17 00:25:10 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: 
> Artifacts fully staged for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_8790d400-f3c3-447c-9c88-6fd2d5817912.
>  Dec 17, 2020 12:25:10 AM 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyServerHandler 
> onStreamError WARNING: Stream Error 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2Exception$StreamException:
>  Received DATA frame for an unknown stream 3 at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2Exception.streamError(Http2Exception.java:147)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.shouldIgnoreHeadersOrDataFrame(DefaultHttp2ConnectionDecoder.java:591)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onDataRead(DefaultHttp2ConnectionDecoder.java:239)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2InboundFrameLogger$1.onDataRead(Http2InboundFrameLogger.java:48)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readDataFrame(DefaultHttp2FrameReader.java:422)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.processPayloadState(DefaultHttp2FrameReader.java:251)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readFrame(DefaultHttp2FrameReader.java:160)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2InboundFrameLogger.readFrame(Http2InboundFrameLogger.java:41)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder.decodeFrame(DefaultHttp2ConnectionDecoder.java:174)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2ConnectionHandler$FrameDecoder.decode(Http2ConnectionHandler.java:378)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2ConnectionHandler.decode(Http2ConnectionHandler.java:438)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:505)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:283)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:352)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1422)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:931)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:700)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:635)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:552)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:514)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1044)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
>  at java.lang.Thread.run(Thread.java:748) 20/12/17 00:25:10 INFO 
> org.apache.beam.runners.spark.SparkJobInvoker: Invoking job 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  20/12/17 00:25:10 INFO org.apache.beam.runners.jobsubmission.JobInvocation: 
> Starting job invocation 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  20/12/17 00:25:10 INFO org.apache.beam.runners.portability.PortableRunner: 
> RunJobResponse: job_id: 
> "metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354"
>  20/12/17 00:25:10 INFO org.apache.beam.runners.spark.SparkPipelineRunner: 
> Will stage 313 files. (Enable logging at DEBUG level to see which files will 
> be staged.) 20/12/17 00:25:10 INFO 
> org.apache.beam.runners.spark.translation.SparkContextFactory: Creating a 
> brand new Spark Context. 20/12/17 00:25:10 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Running job 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  on Spark master local[4] 20/12/17 00:25:10 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Running job 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  on Spark master local[4] 20/12/17 00:25:10 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Job 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354:
>  Pipeline translated successfully. Computing outputs 20/12/17 00:25:10 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:11 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:11 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:12 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:12 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:13 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:13 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:14 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:14 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:15 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:15 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:16 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:16 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:17 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:17 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:18 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:18 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:19 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:19 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:20 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:20 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:21 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:21 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:22 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:22 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:23 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:23 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:24 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:24 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:25 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:25 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:26 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:26 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:27 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:27 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:31 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:31 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:32 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:32 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:33 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:33 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:34 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:34 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:35 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:35 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:36 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:36 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:37 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:37 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:38 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:38 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:39 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:39 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:40 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:40 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:25:40 WARN 
> org.apache.spark.streaming.util.BatchedWriteAheadLog: BatchedWriteAheadLog 
> Writer queue interrupted. Exception in thread "streaming-job-executor-0" 
> java.lang.Error: java.lang.InterruptedException at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1155)
>  at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>  at java.lang.Thread.run(Thread.java:748) Caused by: 
> java.lang.InterruptedException at 
> java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:998)
>  at 
> java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
>  at scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:206) 
> at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:222) at 
> scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:157) at 
> org.apache.spark.util.ThreadUtils$.awaitReady(ThreadUtils.scala:243) at 
> org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:750) at 
> org.apache.spark.SparkContext.runJob(SparkContext.scala:2061) at 
> org.apache.spark.SparkContext.runJob(SparkContext.scala:2082) at 
> org.apache.spark.SparkContext.runJob(SparkContext.scala:2101) at 
> org.apache.spark.SparkContext.runJob(SparkContext.scala:2126) at 
> org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:972) at 
> org.apache.spark.rdd.RDD$$anonfun$foreach$1.apply(RDD.scala:970) at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at 
> org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
>  at org.apache.spark.rdd.RDD.withScope(RDD.scala:385) at 
> org.apache.spark.rdd.RDD.foreach(RDD.scala:970) at 
> org.apache.spark.api.java.JavaRDDLike$class.foreach(JavaRDDLike.scala:351) at 
> org.apache.spark.api.java.AbstractJavaRDDLike.foreach(JavaRDDLike.scala:45) 
> at 
> org.apache.beam.runners.spark.translation.streaming.UnboundedDataset.lambda$action$e3b46054$1(UnboundedDataset.java:79)
>  at 
> org.apache.spark.streaming.api.java.JavaDStreamLike$$anonfun$foreachRDD$1.apply(JavaDStreamLike.scala:272)
>  at 
> org.apache.spark.streaming.api.java.JavaDStreamLike$$anonfun$foreachRDD$1.apply(JavaDStreamLike.scala:272)
>  at 
> org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:628)
>  at 
> org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:628)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:51)
>  at 
> org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:416)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:50)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
>  at 
> org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:50)
>  at scala.util.Try$.apply(Try.scala:192) at 
> org.apache.spark.streaming.scheduler.Job.run(Job.scala:39) at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:257)
>  at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
>  at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:257)
>  at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58) at 
> org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:256)
>  at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>  ... 2 more 20/12/17 00:25:43 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Job 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  finished. 20/12/17 00:25:43 WARN 
> org.apache.spark.streaming.StreamingContext: StreamingContext has already 
> been stopped 20/12/17 00:25:44 ERROR org.apache.spark.executor.Executor: 
> Exception in task 3.0 in stage 0.0 (TID 3): null 20/12/17 00:25:44 ERROR 
> org.apache.spark.executor.Executor: Exception in task 1.0 in stage 0.0 (TID 
> 1): null 20/12/17 00:25:44 ERROR org.apache.spark.executor.Executor: 
> Exception in task 2.0 in stage 0.0 (TID 2): null 20/12/17 00:25:44 ERROR 
> org.apache.spark.executor.Executor: Exception in task 0.0 in stage 0.0 (TID 
> 0): 
> /tmp/spark-08abcca7-760c-4f39-a2f1-eccb21517264/userFiles-2da41c38-ea16-473e-9dc5-dca88d6c3a56/fetchFileTemp286404856321579841.tmp
>  20/12/17 00:25:46 INFO 
> org.apache.beam.runners.jobsubmission.InMemoryJobService: Getting job metrics 
> for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  20/12/17 00:25:46 INFO 
> org.apache.beam.runners.jobsubmission.InMemoryJobService: Finished getting 
> job metrics for 
> metricspushertest0pushesusermetrics-jenkins-1217002508-7df54d1b_69466ce6-1972-45a9-bc4c-d7bbf8b8b354
>  20/12/17 00:25:46 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: JobServer stopped on 
> localhost:45455 20/12/17 00:25:46 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: ArtifactStagingServer 
> stopped on localhost:34495 20/12/17 00:25:46 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: Expansion stopped on 
> localhost:41423 20/12/17 00:25:52 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: ArtifactStagingService 
> started on localhost:42999 20/12/17 00:25:52 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: Java ExpansionService 
> started on localhost:36993 20/12/17 00:25:52 INFO 
> org.apache.beam.runners.jobsubmission.JobServerDriver: JobService started on 
> localhost:41955 20/12/17 00:25:59 INFO 
> org.apache.beam.runners.portability.PortableRunner: Using job server 
> endpoint: localhost:41955 20/12/17 00:26:00 INFO 
> org.apache.beam.runners.portability.PortableRunner: PrepareJobResponse: 
> preparation_id: 
> "metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f"
>  artifact_staging_endpoint { url: "localhost:42999" } staging_session_token: 
> "metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f"
>  20/12/17 00:26:00 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Staging 
> artifacts for 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f.
>  20/12/17 00:26:00 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: 
> Resolving artifacts for 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f.EMBEDDED.
>  20/12/17 00:26:00 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: Getting 
> 313 artifacts for 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f.null.
>  20/12/17 00:26:02 INFO 
> org.apache.beam.runners.fnexecution.artifact.ArtifactStagingService: 
> Artifacts fully staged for 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_5a1c88b9-0280-4c67-8fcc-18a19f44f33f.
>  Dec 17, 2020 12:26:02 AM 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyServerHandler 
> onStreamError WARNING: Stream Error 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2Exception$StreamException:
>  Received DATA frame for an unknown stream 3 at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2Exception.streamError(Http2Exception.java:147)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.shouldIgnoreHeadersOrDataFrame(DefaultHttp2ConnectionDecoder.java:591)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder$FrameReadListener.onDataRead(DefaultHttp2ConnectionDecoder.java:239)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2InboundFrameLogger$1.onDataRead(Http2InboundFrameLogger.java:48)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readDataFrame(DefaultHttp2FrameReader.java:422)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.processPayloadState(DefaultHttp2FrameReader.java:251)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2FrameReader.readFrame(DefaultHttp2FrameReader.java:160)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2InboundFrameLogger.readFrame(Http2InboundFrameLogger.java:41)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.DefaultHttp2ConnectionDecoder.decodeFrame(DefaultHttp2ConnectionDecoder.java:174)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2ConnectionHandler$FrameDecoder.decode(Http2ConnectionHandler.java:378)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.http2.Http2ConnectionHandler.decode(Http2ConnectionHandler.java:438)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:505)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:283)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:352)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1422)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:374)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:360)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:931)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:700)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:635)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:552)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:514)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.SingleThreadEventExecutor$6.run(SingleThreadEventExecutor.java:1044)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
>  at java.lang.Thread.run(Thread.java:748) 20/12/17 00:26:02 INFO 
> org.apache.beam.runners.spark.SparkJobInvoker: Invoking job 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea
>  20/12/17 00:26:02 INFO org.apache.beam.runners.jobsubmission.JobInvocation: 
> Starting job invocation 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea
>  20/12/17 00:26:02 INFO org.apache.beam.runners.portability.PortableRunner: 
> RunJobResponse: job_id: 
> "metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea"
>  20/12/17 00:26:02 INFO org.apache.beam.runners.spark.SparkPipelineRunner: 
> Will stage 313 files. (Enable logging at DEBUG level to see which files will 
> be staged.) 20/12/17 00:26:02 INFO 
> org.apache.beam.runners.spark.translation.SparkContextFactory: Creating a 
> brand new Spark Context. 20/12/17 00:26:02 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Running job 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea
>  on Spark master local[4] 20/12/17 00:26:02 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Running job 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea
>  on Spark master local[4] 20/12/17 00:26:02 INFO 
> org.apache.beam.runners.spark.SparkPipelineRunner: Job 
> metricspushertest0pushessystemmetrics-jenkins-1217002559-d3c16808_b308de0d-6c0c-4a3a-ab49-6fb19e39afea:
>  Pipeline translated successfully. Computing outputs 20/12/17 00:26:02 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:03 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:04 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:04 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:05 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:05 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:06 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:06 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:07 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:07 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:08 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:08 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:09 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:09 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:10 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:10 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:11 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:11 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:12 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:12 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:13 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:13 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:14 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:14 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:15 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:15 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:16 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:16 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:17 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:17 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:18 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:18 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:19 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:19 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:20 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:20 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:21 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:21 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:22 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:22 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:23 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:23 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:24 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:24 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:25 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:25 INFO 
> org.apache.beam.fn.harness.FnHarness: Fn Harness started 20/12/17 00:26:25 
> INFO org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: Beam Fn 
> Logging client connected. 20/12/17 00:26:25 WARN 
> org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: No 
> worker_id header provided in control request 20/12/17 00:26:25 INFO 
> org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: 
> Beam Fn Control client connected with id 20/12/17 00:26:25 INFO 
> org.apache.beam.fn.harness.FnHarness: Entering instruction processing loop 
> 20/12/17 00:26:25 INFO 
> org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: 
> getProcessBundleDescriptor request with id 12-2 20/12/17 00:26:25 INFO 
> org.apache.beam.runners.fnexecution.data.GrpcDataService: Beam Fn Data client 
> connected. 20/12/17 00:26:25 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:25 INFO 
> org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: 
> getProcessBundleDescriptor request with id 12-3 20/12/17 00:26:25 INFO 
> org.apache.beam.runners.fnexecution.control.FnApiControlClientPoolService: 
> getProcessBundleDescriptor request with id 12-4 20/12/17 00:26:26 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:26 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:26 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: Put new watermark 
> block: {0=SparkWatermarks{lowWatermark=294247-01-09T04:00:54.775Z, 
> highWatermark=294247-01-10T04:00:54.775Z, 
> synchronizedProcessingTime=2020-12-17T00:25:10.732Z}} 20/12/17 00:26:26 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164763500 has completed, watermarks have been 
> updated. 20/12/17 00:26:27 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: Put new watermark 
> block: {0=SparkWatermarks{lowWatermark=294247-01-09T04:00:54.775Z, 
> highWatermark=294247-01-10T04:00:54.775Z, 
> synchronizedProcessingTime=2020-12-17T00:26:02.901Z}} 20/12/17 00:26:27 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164764000 has completed, watermarks have been 
> updated. 20/12/17 00:26:27 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164764500 20/12/17 00:26:27 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164764500 has completed, watermarks have been 
> updated. 20/12/17 00:26:27 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:27 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164765000 20/12/17 00:26:27 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164765000 has completed, watermarks have been 
> updated. 20/12/17 00:26:27 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164765500 20/12/17 00:26:27 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164765500 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164766000 20/12/17 00:26:28 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164766000 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164766500 20/12/17 00:26:28 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164766500 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164767000 20/12/17 00:26:28 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164767000 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164767500 20/12/17 00:26:28 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164767500 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:28 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164768000 20/12/17 00:26:28 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164768000 has completed, watermarks have been 
> updated. 20/12/17 00:26:28 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164768500 20/12/17 00:26:29 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164768500 has completed, watermarks have been 
> updated. 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164769000 20/12/17 00:26:29 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164769000 has completed, watermarks have been 
> updated. 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164769500 20/12/17 00:26:29 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164769500 has completed, watermarks have been 
> updated. 20/12/17 00:26:29 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:29 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164770000 20/12/17 00:26:29 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164770000 has completed, watermarks have been 
> updated. 20/12/17 00:26:29 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164770500 20/12/17 00:26:29 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164770500 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164771000 20/12/17 00:26:30 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164771000 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164771500 20/12/17 00:26:30 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164771500 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164772000 20/12/17 00:26:30 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164772000 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164772500 20/12/17 00:26:30 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164772500 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:30 WARN 
> org.apache.spark.streaming.dstream.QueueInputDStream: queueStream doesn't 
> support checkpointing 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new watermarks 
> could be computed upon completion of batch: 1608164773000 20/12/17 00:26:30 
> INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder$WatermarkAdvancingStreamingListener:
>  Batch with timestamp: 1608164773000 has completed, watermarks have been 
> updated. 20/12/17 00:26:30 INFO 
> org.apache.beam.runners.spark.util.GlobalWatermarkHolder: No new waterma 
> ...[truncated 92142 chars]... 
> 16e91dffce836093e018debb3dc4bcdd8c/195-EMBEDDED-commons-math3-3.6.1-HlbXsFjSi2Wr0la4RY44hbZ0wdWI-kPNfRy7nH7yswg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCcommons-math3-3.6.1-HlbXsFjSi2Wr0la4RY44hbZ0wdWI-kPNfRy7nH7yswg.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/196-EMBEDDED-log4j-1.2.17-HTFpZEVpdyBScJF1Q2kIKmZRvUl4G2AF3rlOVnU0Bvk.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<log4j-1.2.17-HTFpZEVpdyBScJF1Q2kIKmZRvUl4G2AF3rlOVnU0Bvk.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\253\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/197-EMBEDDED-compress-lzf-1.0.3-bPk72hwsr2GGUvl9LzbIg6Wpd0NFOEwF01k7FzcxvM0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nBcompress-lzf-1.0.3-bPk72hwsr2GGUvl9LzbIg6Wpd0NFOEwF01k7FzcxvM0.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/198-EMBEDDED-RoaringBitmap-0.7.45-XbXCu45c1TaL0HhPQnpVZmUH1xWMMWr-9OE0a3JGF34.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDRoaringBitmap-0.7.45-XbXCu45c1TaL0HhPQnpVZmUH1xWMMWr-9OE0a3JGF34.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/199-EMBEDDED-commons-net-3.1-NKWNbYClB0gwfmdOwntEEeZTb9EueL7EKOsu5JoSMAc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?commons-net-3.1-NKWNbYClB0gwfmdOwntEEeZTb9EueL7EKOsu5JoSMAc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\270\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/200-EMBEDDED-jersey-container-servlet-2.22.2-JFZ9pC9zBH1vMCsbyvxVLo_d9iGfB8xyd06qxKVtV-c.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nOjersey-container-servlet-2.22.2-JFZ9pC9zBH1vMCsbyvxVLo_d9iGfB8xyd06qxKVtV-c.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\275\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/201-EMBEDDED-jersey-container-servlet-core-2.22.2-fV73Sa6v0i8lvwa0eb6SIBs06KqOmtTb7mm7z03Bzgc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nTjersey-container-servlet-core-2.22.2-fV73Sa6v0i8lvwa0eb6SIBs06KqOmtTb7mm7z03Bzgc.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/202-EMBEDDED-jersey-server-2.22.2-j4ZJtWjQaPBTNi-j3vViBhZt_Os7qnTp8Z7_b4-Nnx8.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDjersey-server-2.22.2-j4ZJtWjQaPBTNi-j3vViBhZt_Os7qnTp8Z7_b4-Nnx8.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/203-EMBEDDED-jersey-client-2.22.2-wiKfdJaNs9DmdvaApYwRSCeN75J0mfby6x6TKrpB-9U.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDjersey-client-2.22.2-wiKfdJaNs9DmdvaApYwRSCeN75J0mfby6x6TKrpB-9U.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\261\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/204-EMBEDDED-jersey-media-jaxb-2.22.2-Cpl4ndTy8kRR989CPVaC2-85o0YJVV9FW3NUaWe5wiU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nHjersey-media-jaxb-2.22.2-Cpl4ndTy8kRR989CPVaC2-85o0YJVV9FW3NUaWe5wiU.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/205-EMBEDDED-jersey-common-2.22.2-M8Ub2n_pTCcFavBca2uxoMKWi1vPCbTAmMy-lTIxGG0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDjersey-common-2.22.2-M8Ub2n_pTCcFavBca2uxoMKWi1vPCbTAmMy-lTIxGG0.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/206-EMBEDDED-netty-all-4.1.51.Final-64-sagv2iF4W_dRT9_Nizf_6cwIhxJBnIbv6tG_h9r8.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFnetty-all-4.1.51.Final-64-sagv2iF4W_dRT9_Nizf_6cwIhxJBnIbv6tG_h9r8.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\252\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/207-EMBEDDED-netty-3.9.9.Final-aXrySOIWyeFYs8nWcC9UVBGI_JLeGaydL1SW-AvOero.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nAnetty-3.9.9.Final-aXrySOIWyeFYs8nWcC9UVBGI_JLeGaydL1SW-AvOero.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/208-EMBEDDED-stream-2.7.0-62Yme-Y7ukXz6dThQ7wykG21ldu-2vtihC5-o0B5bgE.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<stream-2.7.0-62Yme-Y7ukXz6dThQ7wykG21ldu-2vtihC5-o0B5bgE.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\242\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/209-EMBEDDED-ivy-2.4.0-zoHLI0QGsJO1uN6fb1sqUO0IJNaiNYkTU-jT6UGlOXA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n9ivy-2.4.0-zoHLI0QGsJO1uN6fb1sqUO0IJNaiNYkTU-jT6UGlOXA.jar" } dependencies 
> { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\242\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/210-EMBEDDED-oro-2.0.8-4AzNrV3360P97kQjLvZGAr9jgHwtEzp76Dugn9Sa8m4.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n9oro-2.0.8-4AzNrV3360P97kQjLvZGAr9jgHwtEzp76Dugn9Sa8m4.jar" } dependencies 
> { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\246\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/211-EMBEDDED-pyrolite-4.13-S6Qk0yjL16b_cxCMzkDTzDdu72ArlH5X_ceCFx2S5GM.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n=pyrolite-4.13-S6Qk0yjL16b_cxCMzkDTzDdu72ArlH5X_ceCFx2S5GM.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\244\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/212-EMBEDDED-py4j-0.10.7-SdHj6GUypsaMOZ-4NQhgPr7rm5QuDPMB23HHmH_U38s.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n;py4j-0.10.7-SdHj6GUypsaMOZ-4NQhgPr7rm5QuDPMB23HHmH_U38s.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/213-EMBEDDED-commons-crypto-1.0.0-AEPY102N9jLFf5OIKOb279VV4pOpB53N9Z6rjkAQdJE.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDcommons-crypto-1.0.0-AEPY102N9jLFf5OIKOb279VV4pOpB53N9Z6rjkAQdJE.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/214-EMBEDDED-unused-1.0.0-AP0n_JvecBWB59z1uVmB2edJocF2u4v81J9nV2j_a_A.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<unused-1.0.0-AP0n_JvecBWB59z1uVmB2edJocF2u4v81J9nV2j_a_A.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\260\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/215-EMBEDDED-univocity-parsers-2.7.3-_hRHbyRDSi4a1W8e3lYb0BQxRd3fjULjFWjhEpwkG3c.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nGunivocity-parsers-2.7.3-_hRHbyRDSi4a1W8e3lYb0BQxRd3fjULjFWjhEpwkG3c.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/216-EMBEDDED-orc-mapreduce-1.5.5-nohive-duEkgSR_Je5ipD-_Q3ILX8nlAP4Vc8xTV467SBZZE2M.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJorc-mapreduce-1.5.5-nohive-duEkgSR_Je5ipD-_Q3ILX8nlAP4Vc8xTV467SBZZE2M.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\253\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/217-EMBEDDED-leveldbjni-all-1.8-wpchOw5vk5IwWVJ1PzCZpMAucLNlYmb-AYZ-e2wWD_4.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nBleveldbjni-all-1.8-wpchOw5vk5IwWVJ1PzCZpMAucLNlYmb-AYZ-e2wWD_4.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/218-EMBEDDED-hadoop-annotations-2.10.1-OFIe3ZPg29De8bZXcD3x9HhwgIudOezUAtSqPFrPFkI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nIhadoop-annotations-2.10.1-OFIe3ZPg29De8bZXcD3x9HhwgIudOezUAtSqPFrPFkI.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\244\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/219-EMBEDDED-xmlenc-0.52-KCrhhfwv8n2ncUr5liiXwJz--vuIByIZxKL5xzYWwCY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n;xmlenc-0.52-KCrhhfwv8n2ncUr5liiXwJz--vuIByIZxKL5xzYWwCY.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\253\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/220-EMBEDDED-commons-codec-1.14-oSjk-T-r5Tgd7WTPKHMBngYDC3GOtDzurgsOXRetM-k.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nBcommons-codec-1.14-oSjk-T-r5Tgd7WTPKHMBngYDC3GOtDzurgsOXRetM-k.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/221-EMBEDDED-commons-collections-3.2.2-7urpF5FxRKaKdB1MDf9mqlxcX9hVk_8he87T_Iyng7g.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nIcommons-collections-3.2.2-7urpF5FxRKaKdB1MDf9mqlxcX9hVk_8he87T_Iyng7g.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/222-EMBEDDED-servlet-api-2.5-xljqNgpw-u6ttm-zyQpwLkFCoKt3aPmumChnjg2a1Nw.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?servlet-api-2.5-xljqNgpw-u6ttm-zyQpwLkFCoKt3aPmumChnjg2a1Nw.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/223-EMBEDDED-jetty-sslengine-6.1.26-nF9rsWi6AbldJQtX8GHICU4c6cia5OdzSSussXGS6oc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFjetty-sslengine-6.1.26-nF9rsWi6AbldJQtX8GHICU4c6cia5OdzSSussXGS6oc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/224-EMBEDDED-jetty-6.1.26-IQkdOpwTSfZA_cQhUEpgTAQO2JCH7MEq--MjUzJu1OU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<jetty-6.1.26-IQkdOpwTSfZA_cQhUEpgTAQO2JCH7MEq--MjUzJu1OU.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\252\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/225-EMBEDDED-jetty-util-6.1.26-m5dM4rmfSCVLdhJjN9xFshIm84Oq7WFvWXgK2vFnwEc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nAjetty-util-6.1.26-m5dM4rmfSCVLdhJjN9xFshIm84Oq7WFvWXgK2vFnwEc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\244\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/226-EMBEDDED-jsp-api-2.1-VF9OfcZ4_7TPi9D9QLSkRwpAmnh8DqfQrS8I1WESmHs.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n;jsp-api-2.1-VF9OfcZ4_7TPi9D9QLSkRwpAmnh8DqfQrS8I1WESmHs.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\252\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/227-EMBEDDED-jersey-server-1.9-Pe2RsZgHdWG9UfbARCyc1wt1TYsxthr69Ei9qdAYSPA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nAjersey-server-1.9-Pe2RsZgHdWG9UfbARCyc1wt1TYsxthr69Ei9qdAYSPA.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/228-EMBEDDED-jersey-core-1.9-LG0OyI_Iw2y0FjfZwA0GmMIstrahN_pSbveC4A0iZbw.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?jersey-core-1.9-LG0OyI_Iw2y0FjfZwA0GmMIstrahN_pSbveC4A0iZbw.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\251\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/229-EMBEDDED-commons-lang-2.6-UPEbCfh3wpTVbyRGP0fSj5Kc9QRPZIZhwPDPuumi9Jw.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n...@commons-lang-2.6-upebcfh3wptvbyrgp0fsj5kc9qrpzizhwpdpuumi9jw.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/230-EMBEDDED-proto-google-iam-v1-1.0.1-Tb-hkmoSlYFbGhFR9B7BLWXNrhHgCH7-SR2YfezZepA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nIproto-google-iam-v1-1.0.1-Tb-hkmoSlYFbGhFR9B7BLWXNrhHgCH7-SR2YfezZepA.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\272\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/231-EMBEDDED-proto-google-common-protos-1.18.1-fxxqi4XhdQoizkjg_Kmq4N660RMapaBb_xC4RbWxzJQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nQproto-google-common-protos-1.18.1-fxxqi4XhdQoizkjg_Kmq4N660RMapaBb_xC4RbWxzJQ.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/232-EMBEDDED-protobuf-java-3.12.0-qY7VoCcs3aa96Y_hXnlOPZnsBFVNuguo4KSf9c7MXp4.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDprotobuf-java-3.12.0-qY7VoCcs3aa96Y_hXnlOPZnsBFVNuguo4KSf9c7MXp4.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/233-EMBEDDED-gson-2.8.6-yPtIOQVNKAswM_gA0fWpfeLwKOuLoutFitKH5Tbz8l8.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:gson-2.8.6-yPtIOQVNKAswM_gA0fWpfeLwKOuLoutFitKH5Tbz8l8.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\244\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/234-EMBEDDED-jsch-0.1.55-1JKxWm0uo_HMOcQiyVPEDBIokHPb6DYNmMD2-ex0_EQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n;jsch-0.1.55-1JKxWm0uo_HMOcQiyVPEDBIokHPb6DYNmMD2-ex0_EQ.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\266\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/235-EMBEDDED-htrace-core4-4.1.0-incubating-XUW3kEhXw-StNrO8xXvi0sXzCMabX2pYvYaqfUiiXvY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nMhtrace-core4-4.1.0-incubating-XUW3kEhXw-StNrO8xXvi0sXzCMabX2pYvYaqfUiiXvY.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/236-EMBEDDED-woodstox-core-5.0.3-ocBLZPv-IK6fLGCjvxYz_tZoiuMZNba9SkV6G7sugtQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCwoodstox-core-5.0.3-ocBLZPv-IK6fLGCjvxYz_tZoiuMZNba9SkV6G7sugtQ.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/237-EMBEDDED-stax2-api-4.2.1-Z4Vn5ItRpCxlxpnyZlOa09Z21LGlsK19iezoudV3JXk.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?stax2-api-4.2.1-Z4Vn5ItRpCxlxpnyZlOa09Z21LGlsK19iezoudV3JXk.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/238-EMBEDDED-hamcrest-2.1-upOy46ViMiukMvChtTrdzFXLGIJTMZoCDtd_gk5pIFA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<hamcrest-2.1-upOy46ViMiukMvChtTrdzFXLGIJTMZoCDtd_gk5pIFA.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/239-EMBEDDED-reflectasm-1.07-shaded-CKcOrbSydO2u_BGUwfdXBiGlGwqaoDaqFdzbe5J-fHY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFreflectasm-1.07-shaded-CKcOrbSydO2u_BGUwfdXBiGlGwqaoDaqFdzbe5J-fHY.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/240-EMBEDDED-minlog-1.2-pnjLGqj10D2QHJksdXQYQdmKm8PVXa0C6E1lMVxOYPI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:minlog-1.2-pnjLGqj10D2QHJksdXQYQdmKm8PVXa0C6E1lMVxOYPI.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/241-EMBEDDED-j2objc-annotations-1.3-Ia8wySJnvWEiwOC00gzMtmQaN-r5VsZUDsRx1YTmSns.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFj2objc-annotations-1.3-Ia8wySJnvWEiwOC00gzMtmQaN-r5VsZUDsRx1YTmSns.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/242-EMBEDDED-javaruntype-1.3-hGIPTL4YOPBHEuOnoizGWeK7lC6c27i1TX8uRrF9Q-g.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?javaruntype-1.3-hGIPTL4YOPBHEuOnoizGWeK7lC6c27i1TX8uRrF9Q-g.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\244\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/243-EMBEDDED-ognl-3.1.12-dLY_oM2x1HGOaAfy7RAFrC8VpRORDWgDmvmlWRlhlek.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n;ognl-3.1.12-dLY_oM2x1HGOaAfy7RAFrC8VpRORDWgDmvmlWRlhlek.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\260\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/244-EMBEDDED-generics-resolver-2.0.1-LT7P9gZLIjSTlVVcCZQJSTcacMt8Dg_e23EVZrE_KTE.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nGgenerics-resolver-2.0.1-LT7P9gZLIjSTlVVcCZQJSTcacMt8Dg_e23EVZrE_KTE.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\265\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/245-EMBEDDED-auto-value-annotations-1.7.2-hzmNqWKhIQOhlXuyOcVW-OjWbKESv4q5msQEyAojhas.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nLauto-value-annotations-1.7.2-hzmNqWKhIQOhlXuyOcVW-OjWbKESv4q5msQEyAojhas.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\256\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/246-EMBEDDED-opencensus-api-0.24.0-9WGxzCZzhEKI5Zbd9btlloaKhHL9LLiZOVP8XANLI1I.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nEopencensus-api-0.24.0-9WGxzCZzhEKI5Zbd9btlloaKhHL9LLiZOVP8XANLI1I.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/247-EMBEDDED-grpc-context-1.32.2-0H-oAV0WIUvlA0wvEXe_uKBRhqwRIR-eLAJr7PQ6844.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCgrpc-context-1.32.2-0H-oAV0WIUvlA0wvEXe_uKBRhqwRIR-eLAJr7PQ6844.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\260\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/248-EMBEDDED-google-extensions-0.5.1-iwhiythblUnzVf44PGxjgW0vGVKWNOAzrgbQEHqxELk.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nGgoogle-extensions-0.5.1-iwhiythblUnzVf44PGxjgW0vGVKWNOAzrgbQEHqxELk.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\265\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/249-EMBEDDED-flogger-system-backend-0.5.1-aF3jO1PrMTBJu-7n9LeoDdCejnVOlrBIo-2rLOuzZEI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nLflogger-system-backend-0.5.1-aF3jO1PrMTBJu-7n9LeoDdCejnVOlrBIo-2rLOuzZEI.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\255\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/250-EMBEDDED-zookeeper-jute-3.5.7-AjJ2xg8IPcfJGZqfwkbEb-l3XELhhcJ_RTiH4zyDtwU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nDzookeeper-jute-3.5.7-AjJ2xg8IPcfJGZqfwkbEb-l3XELhhcJ_RTiH4zyDtwU.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/251-EMBEDDED-audience-annotations-0.5.0-yCYx8Gx11Gv2Uk2V8NbC467xs-tKe1hMopZiTvDUdL4.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJaudience-annotations-0.5.0-yCYx8Gx11Gv2Uk2V8NbC467xs-tKe1hMopZiTvDUdL4.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/252-EMBEDDED-netty-handler-4.1.51.Final-RGGXDwT01euREq2UJVzhmHOUzmTebDyHaQvwhlyTYlg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJnetty-handler-4.1.51.Final-RGGXDwT01euREq2UJVzhmHOUzmTebDyHaQvwhlyTYlg.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\300\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/253-EMBEDDED-netty-transport-native-epoll-4.1.51.Final-jhp8_fw4knNnkBZ4PgG68gbezxSQmhnAGpocbjesu9M.j"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nYnetty-transport-native-epoll-4.1.51.Final-jhp8_fw4knNnkBZ4PgG68gbezxSQmhnAGpocbjesu9M.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/254-EMBEDDED-hadoop-hdfs-client-2.10.1-PY6m6joD2T6vr-wkECaRkhrDk3J5PT6sa_ybU6-bjkQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nIhadoop-hdfs-client-2.10.1-PY6m6joD2T6vr-wkECaRkhrDk3J5PT6sa_ybU6-bjkQ.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/255-EMBEDDED-shims-0.7.45-jqsADNtdJKUQB6hTc082FGkyfte63kS6gYD9Pk-w_V0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<shims-0.7.45-jqsADNtdJKUQB6hTc082FGkyfte63kS6gYD9Pk-w_V0.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\256\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/256-EMBEDDED-javax.ws.rs-api-2.0.1-OGB9Ym8iiNj7wbH4piw2nmOAbZoxOsfLxfnWyU9LRm0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nEjavax.ws.rs-api-2.0.1-OGB9Ym8iiNj7wbH4piw2nmOAbZoxOsfLxfnWyU9LRm0.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\256\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/257-EMBEDDED-hk2-locator-2.4.0-b34-6kfr9-1W73UQVXEM-tNoQLzDY4PPOHxKljtBRHwGb48.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nEhk2-locator-2.4.0-b34-6kfr9-1W73UQVXEM-tNoQLzDY4PPOHxKljtBRHwGb48.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\252\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/258-EMBEDDED-hk2-api-2.4.0-b34-brBxquoycBWsPaGNUGbDZMGjmXj0tvlGRBWGdcpbnO0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nAhk2-api-2.4.0-b34-brBxquoycBWsPaGNUGbDZMGjmXj0tvlGRBWGdcpbnO0.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/259-EMBEDDED-javax.inject-2.4.0-b34-_b-AoBuFQEW9QAS3xrH9wtqB20db-9CO1XTu_8-aexo.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFjavax.inject-2.4.0-b34-_b-AoBuFQEW9QAS3xrH9wtqB20db-9CO1XTu_8-aexo.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\261\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/260-EMBEDDED-javax.annotation-api-1.2-WQmzlso6K-ENDuoyx073jYFuG06tId4deN4fiQ0DPgQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nHjavax.annotation-api-1.2-WQmzlso6K-ENDuoyx073jYFuG06tId4deN4fiQ0DPgQ.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/261-EMBEDDED-jersey-guava-2.22.2-D9zHXQJa_0Ay07i-kJtaCCkTsn2VOtgt1d8q0prqY2s.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCjersey-guava-2.22.2-D9zHXQJa_0Ay07i-kJtaCCkTsn2VOtgt1d8q0prqY2s.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\264\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/262-EMBEDDED-osgi-resource-locator-1.0.1-d1ADvld-iAb1G25EK-EDPYO-LLIgciezSb4L8W5sCEM.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nKosgi-resource-locator-1.0.1-d1ADvld-iAb1G25EK-EDPYO-LLIgciezSb4L8W5sCEM.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/263-EMBEDDED-validation-api-1.1.0.Final-8517pyU-NfWsSAgewbwoxd-bMqxLfbIIU-Wo52v3sO0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJvalidation-api-1.1.0.Final-8517pyU-NfWsSAgewbwoxd-bMqxLfbIIU-Wo52v3sO0.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\246\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/264-EMBEDDED-janino-3.0.16-9h24Y75jpbOBXYwclGRugdEWiJXv3lrNNIPCQiG5xlU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n=janino-3.0.16-9h24Y75jpbOBXYwclGRugdEWiJXv3lrNNIPCQiG5xlU.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\260\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/265-EMBEDDED-commons-compiler-3.0.16-C4BjaTC2IZexotsEriG57Vz6yXFctE7dbcb2dfQXlwg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nGcommons-compiler-3.0.16-C4BjaTC2IZexotsEriG57Vz6yXFctE7dbcb2dfQXlwg.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/266-EMBEDDED-antlr4-4.7-eGclcCizNzrwEd7nts6bWHqP1cegsl9osv9MuQvoqgc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:antlr4-4.7-eGclcCizNzrwEd7nts6bWHqP1cegsl9osv9MuQvoqgc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\253\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/267-EMBEDDED-antlr4-runtime-4.7-KmGUP4A7vR0OAt_9GbkqQY-DNAyZQ0aAnjtR4iMapsA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nBantlr4-runtime-4.7-KmGUP4A7vR0OAt_9GbkqQY-DNAyZQ0aAnjtR4iMapsA.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\253\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/268-EMBEDDED-aircompressor-0.10-pUcavdyZqVk5q_wEBc3bIhPE-6Vh3pT4iNbmJVZugmw.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nBaircompressor-0.10-pUcavdyZqVk5q_wEBc3bIhPE-6Vh3pT4iNbmJVZugmw.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/269-EMBEDDED-parquet-jackson-1.10.1-m8RDI886Nr-xqxl_W48rE6OiYTuqBIm7JSszVTVipSg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFparquet-jackson-1.10.1-m8RDI886Nr-xqxl_W48rE6OiYTuqBIm7JSszVTVipSg.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/270-EMBEDDED-arrow-format-0.10.0-ITh71gEtmLvHCD80n5Vp3EeYzXLFt8mqcTCSu84ZOes.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCarrow-format-0.10.0-ITh71gEtmLvHCD80n5Vp3EeYzXLFt8mqcTCSu84ZOes.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/271-EMBEDDED-hppc-0.7.2-ez3WZh6D4xPXC0qoLFGAuzlTXlNqNDX6dB__lydDO2o.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:hppc-0.7.2-ez3WZh6D4xPXC0qoLFGAuzlTXlNqNDX6dB__lydDO2o.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/272-EMBEDDED-flatbuffers-1.2.0-3f79e055-dD-XMWCWum6FKJFOorBi9qAvyR7HPJilpGJA1tZ-aJg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJflatbuffers-1.2.0-3f79e055-dD-XMWCWum6FKJFOorBi9qAvyR7HPJilpGJA1tZ-aJg.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\270\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/273-EMBEDDED-animal-sniffer-annotations-1.18-R_BYUrSO6brv74D6PYzqYO-kdTwAExId1_5e7y5ccp0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nOanimal-sniffer-annotations-1.18-R_BYUrSO6brv74D6PYzqYO-kdTwAExId1_5e7y5ccp0.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/274-EMBEDDED-httpcore-4.4.13-4G6J1AlDJF_Po57FN82_zjdirs3o-cWXeA0rAMK0NCQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?httpcore-4.4.13-4G6J1AlDJF_Po57FN82_zjdirs3o-cWXeA0rAMK0NCQ.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/275-EMBEDDED-jettison-1.1-N3lAKIsGQ8SHgBN_b2hXiTfh6lyitzgwqCDFCnt-2AE.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<jettison-1.1-N3lAKIsGQ8SHgBN_b2hXiTfh6lyitzgwqCDFCnt-2AE.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/276-EMBEDDED-jaxb-impl-2.3.3-5ReNDHlIJH91oTxom_NvTV1JEKEh9xKqOyCulDdwadg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?jaxb-impl-2.3.3-5ReNDHlIJH91oTxom_NvTV1JEKEh9xKqOyCulDdwadg.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\240\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/277-EMBEDDED-asm-3.1-Mz_1NpBDl1t-AxuLJyBpN0QYVHOOA4wfR_mNByogQ3o.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n7asm-3.1-Mz_1NpBDl1t-AxuLJyBpN0QYVHOOA4wfR_mNByogQ3o.jar" } dependencies { 
> type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/278-EMBEDDED-java-xmlbuilder-0.4-aB5TxP_Vn6EgaIA7JZ46g9Q_B6R8ES50ihh97heesx8.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCjava-xmlbuilder-0.4-aB5TxP_Vn6EgaIA7JZ46g9Q_B6R8ES50ihh97heesx8.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/279-EMBEDDED-nimbus-jose-jwt-7.9-tPWEU-GAqYHrdEoZtNVq-xLxDD3TXnUxzFjubL9b8oY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCnimbus-jose-jwt-7.9-tPWEU-GAqYHrdEoZtNVq-xLxDD3TXnUxzFjubL9b8oY.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\247\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/280-EMBEDDED-json-smart-2.3-kD9IyKpMP2QmRAuNMt6J-h3COxFpq94l5OHQaKpncIs.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n>json-smart-2.3-kD9IyKpMP2QmRAuNMt6J-h3COxFpq94l5OHQaKpncIs.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/281-EMBEDDED-accessors-smart-1.2-DHwmXWL8AHEk3DK5EzbpxCcmUdYpvF-hpOTjvHWOsuQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCaccessors-smart-1.2-DHwmXWL8AHEk3DK5EzbpxCcmUdYpvF-hpOTjvHWOsuQ.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\242\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/282-EMBEDDED-asm-5.0.4-iWYY7YrmJwJSGni8e-QrfEkaCOaSChX4mj7N7DHpoiA.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n9asm-5.0.4-iWYY7YrmJwJSGni8e-QrfEkaCOaSChX4mj7N7DHpoiA.jar" } dependencies 
> { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\242\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/283-EMBEDDED-ST4-4.0.8-WMqrxAyfdLC1mT_YaOD2SlDAdZCU5qJRqq-tmO38ejs.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n9ST4-4.0.8-WMqrxAyfdLC1mT_YaOD2SlDAdZCU5qJRqq-tmO38ejs.jar" } dependencies 
> { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/284-EMBEDDED-antlr-runtime-3.5.2-zj_I7LEPOemjzdy7LONQ0nLZzT0LHhjm_nPDuTichzQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCantlr-runtime-3.5.2-zj_I7LEPOemjzdy7LONQ0nLZzT0LHhjm_nPDuTichzQ.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/285-EMBEDDED-javassist-3.20.0-GA-12kQYvt3nCOBZAyPcqy6LCOHOwHCQ4ZtQcFdxMiEjqI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCjavassist-3.20.0-GA-12kQYvt3nCOBZAyPcqy6LCOHOwHCQ4ZtQcFdxMiEjqI.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\246\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/286-EMBEDDED-flogger-0.5.1-tezRSD4EEZcBJ4b3SZaKYgY8GWTT7Pv5a6kqlXl7uPU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n=flogger-0.5.1-tezRSD4EEZcBJ4b3SZaKYgY8GWTT7Pv5a6kqlXl7uPU.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/287-EMBEDDED-checker-compat-qual-2.5.3-12ua_qYcfAgpCAI_DLwUJ_q5q9LfkVyLij56UJvMvG0.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nIchecker-compat-qual-2.5.3-12ua_qYcfAgpCAI_DLwUJ_q5q9LfkVyLij56UJvMvG0.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\261\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/288-EMBEDDED-netty-codec-4.1.51.Final-_3QaqjX3BIpr58cAqkhRv2Q5F2SOpbfAy62i84SMK-4.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nHnetty-codec-4.1.51.Final-_3QaqjX3BIpr58cAqkhRv2Q5F2SOpbfAy62i84SMK-4.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\300\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/289-EMBEDDED-netty-transport-native-unix-common-4.1.51.Final-FHWV_0ViQv0bMtHmzXgKZluNjgOk4b9W2ot3Kzh"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n_netty-transport-native-unix-common-4.1.51.Final-FHWV_0ViQv0bMtHmzXgKZluNjgOk4b9W2ot3Kzhjxtk.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\265\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/290-EMBEDDED-netty-transport-4.1.51.Final-5b4lnzWiRr9QStk-qPXfMYcrWr6_t1E4DquV1dyEDUQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nLnetty-transport-4.1.51.Final-5b4lnzWiRr9QStk-qPXfMYcrWr6_t1E4DquV1dyEDUQ.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\264\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/291-EMBEDDED-netty-resolver-4.1.51.Final-yKd3ZeSB-_WQbFlutEHeSQlrNUvK4DVrdASsXpY5k1A.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nKnetty-resolver-4.1.51.Final-yKd3ZeSB-_WQbFlutEHeSQlrNUvK4DVrdASsXpY5k1A.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/292-EMBEDDED-netty-buffer-4.1.51.Final-w8O3EOG1qN89YM1GAuCnQ0gdXmCeSqhS-iYp5OQS0kU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nInetty-buffer-4.1.51.Final-w8O3EOG1qN89YM1GAuCnQ0gdXmCeSqhS-iYp5OQS0kU.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\262\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/293-EMBEDDED-netty-common-4.1.51.Final-EQ4GUV9DkTorusI-GqeLf1muCdRmsAr1_POZpPmvG2s.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nInetty-common-4.1.51.Final-EQ4GUV9DkTorusI-GqeLf1muCdRmsAr1_POZpPmvG2s.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/294-EMBEDDED-minlog-1.3.0-97OZ06VHik8-DZi9HJ9HdmEZxmQUvDOqD2zeAGbyTMI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<minlog-1.3.0-97OZ06VHik8-DZi9HJ9HdmEZxmQUvDOqD2zeAGbyTMI.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\245\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/295-EMBEDDED-okhttp-2.7.5-iKyf0btR-CvMZkzB65wiXJDcQ4nWYCMbTMc3vr_n0Ko.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n<okhttp-2.7.5-iKyf0btR-CvMZkzB65wiXJDcQ4nWYCMbTMc3vr_n0Ko.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/296-EMBEDDED-hk2-utils-2.4.0-b34-cCEbH5GIGb9q-_adPRnUrm4qddbib2w5up8g645WEtc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nChk2-utils-2.4.0-b34-cCEbH5GIGb9q-_adPRnUrm4qddbib2w5up8g645WEtc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\271\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/297-EMBEDDED-aopalliance-repackaged-2.4.0-b34-XTywzs5yLHuoq5h7kxBTzbywyxKtXIyKdpHrb35gpks.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nPaopalliance-repackaged-2.4.0-b34-XTywzs5yLHuoq5h7kxBTzbywyxKtXIyKdpHrb35gpks.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\261\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/298-EMBEDDED-servlet-api-2.5-20081211-BodWCWmW_gD2BKw7ZnLW9mPcd36kqDBW4kDQRW535HI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nHservlet-api-2.5-20081211-BodWCWmW_gD2BKw7ZnLW9mPcd36kqDBW4kDQRW535HI.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\263\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/299-EMBEDDED-jakarta.xml.bind-api-2.3.3-wEU59HLppt0MdoXqgtZ3KCJpq457rKLhRQDjgeDGzsU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nJjakarta.xml.bind-api-2.3.3-wEU59HLppt0MdoXqgtZ3KCJpq457rKLhRQDjgeDGzsU.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\261\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/300-EMBEDDED-jakarta.activation-1.2.2-AhVnc-SunQSNFKVq011kS-6fEFKnkdBy3z3tPGVubho.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nHjakarta.activation-1.2.2-AhVnc-SunQSNFKVq011kS-6fEFKnkdBy3z3tPGVubho.jar" 
> } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\257\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/301-EMBEDDED-jcip-annotations-1.0-1-T8z_g4Kq_FiZYsTtsmL2qlleNPHhHmEFfRxqluj8cyM.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nFjcip-annotations-1.0-1-T8z_g4Kq_FiZYsTtsmL2qlleNPHhHmEFfRxqluj8cyM.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\270\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/302-EMBEDDED-org.abego.treelayout.core-1.0.3--l4xOVw5wufUasoPgfcgYJMWB7L6Qb02A46yy2-5MyY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nOorg.abego.treelayout.core-1.0.3--l4xOVw5wufUasoPgfcgYJMWB7L6Qb02A46yy2-5MyY.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\251\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/303-EMBEDDED-javax.json-1.0.4-Dh3sQKHt6WWUElHtqWiu7gUsxPUDeLwxbMSOgVm9vrQ.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n...@javax.json-1.0.4-dh3sqkht6wwuelhtqwiu7gusxpudelwxbmsogvm9vrq.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/304-EMBEDDED-icu4j-58.2-lT4eg7K-fD6i-I2obBNhT0fp5x01eMhSHX8Yd1a2OWI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:icu4j-58.2-lT4eg7K-fD6i-I2obBNhT0fp5x01eMhSHX8Yd1a2OWI.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\254\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/305-EMBEDDED-annotations-4.1.1.4-unNOHoTAnWFa9qCdMwNLTwRC-Hct7BIO-zdthqVlrhU.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nCannotations-4.1.1.4-unNOHoTAnWFa9qCdMwNLTwRC-Hct7BIO-zdthqVlrhU.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\243\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/306-EMBEDDED-okio-1.6.0-EUvcH0czimi8vJWr8vXNxyvu7JGBLy_Ne1IcGTeHYmY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n:okio-1.6.0-EUvcH0czimi8vJWr8vXNxyvu7JGBLy_Ne1IcGTeHYmY.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\247\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/307-EMBEDDED-stax-api-1.0-2-6McOvXb5gslYKoLvgs9s4Up9WKSk3KXLe3_JiMgAibc.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n>stax-api-1.0-2-6McOvXb5gslYKoLvgs9s4Up9WKSk3KXLe3_JiMgAibc.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\242\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/308-EMBEDDED-guice-3.0-GlnQQh_9NVzAtwtC3xwumvdEyKLQyS2jefX8ovB_HSI.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n9guice-3.0-GlnQQh_9NVzAtwtC3xwumvdEyKLQyS2jefX8ovB_HSI.jar" } dependencies 
> { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\247\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/309-EMBEDDED-javax.inject-1-kcdwRKUMSBY2wy2Rb9ickRinIZU5BFLIEGUID5V95_8.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n>javax.inject-1-kcdwRKUMSBY2wy2Rb9ickRinIZU5BFLIEGUID5V95_8.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\275\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/310-EMBEDDED-geronimo-jcache_1.0_spec-1.0-alpha-1-AHChLlj0kblXGTkTJSmaYpRTDubDziXlC9yYsLcAlmw.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nTgeronimo-jcache_1.0_spec-1.0-alpha-1-AHChLlj0kblXGTkTJSmaYpRTDubDziXlC9yYsLcAlmw.jar"
>  } dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\256\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/311-EMBEDDED-mssql-jdbc-6.2.1.jre7-nPollFCuNHHS5uLD2K78ziNuPa74s3NNIdyTw6W76AY.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nEmssql-jdbc-6.2.1.jre7-nPollFCuNHHS5uLD2K78ziNuPa74s3NNIdyTw6W76AY.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\250\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/312-EMBEDDED-aopalliance-1.0-Ct3sZw_tzT8RPFyAkdeDKA0j9146y4QbYanNsHk3agg.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\n?aopalliance-1.0-Ct3sZw_tzT8RPFyAkdeDKA0j9146y4QbYanNsHk3agg.jar" } 
> dependencies { type_urn: "beam:artifact:type:file:v1" type_payload: 
> "\n\256\001/tmp/beam-artifact-staging/d157d116f4e061561d06bd83fe9c7416e91dffce836093e018debb3dc4bcdd8c/313-EMBEDDED-cglib-2.2.1-v20090111-QuHfsmvsvxpjPyW0fjn8xCK4XnfkwEaNmkT4hfX6C-I.jar"
>  role_urn: "beam:artifact:role:staging_to:v1" role_payload: 
> "\nEcglib-2.2.1-v20090111-QuHfsmvsvxpjPyW0fjn8xCK4XnfkwEaNmkT4hfX6C-I.jar" } 
> 20/12/17 00:26:36 INFO 
> org.apache.beam.runners.fnexecution.logging.GrpcLoggingService: 1 Beam Fn 
> Logging clients still connected during shutdown. 20/12/17 00:26:36 WARN 
> org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: Hanged up for unknown 
> endpoint. 20/12/17 00:26:36 ERROR 
> org.apache.beam.sdk.fn.data.BeamFnDataGrpcMultiplexer: Failed to handle for 
> url: "InProcessServer_70" 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException: 
> CANCELLED: Multiplexer hanging up at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status.asRuntimeException(Status.java:533)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientCalls$StreamObserverToCallListenerAdapter.onClose(ClientCalls.java:449)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.PartialForwardingClientCallListener.onClose(PartialForwardingClientCallListener.java:39)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCallListener.onClose(ForwardingClientCallListener.java:23)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCallListener$SimpleForwardingClientCallListener.onClose(ForwardingClientCallListener.java:40)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.CensusStatsModule$StatsClientInterceptor$1$1.onClose(CensusStatsModule.java:700)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.PartialForwardingClientCallListener.onClose(PartialForwardingClientCallListener.java:39)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCallListener.onClose(ForwardingClientCallListener.java:23)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCallListener$SimpleForwardingClientCallListener.onClose(ForwardingClientCallListener.java:40)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.CensusTracingModule$TracingClientInterceptor$1$1.onClose(CensusTracingModule.java:399)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl.closeObserver(ClientCallImpl.java:521)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl.access$300(ClientCallImpl.java:66)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl.close(ClientCallImpl.java:641)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl.access$700(ClientCallImpl.java:529)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl$1StreamClosed.runInternal(ClientCallImpl.java:703)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ClientCallImpl$ClientStreamListenerImpl$1StreamClosed.runInContext(ClientCallImpl.java:692)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
>  at 
> org.apache.beam.vendor.grpc.v1p26p0.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
>  at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>  at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>  at java.lang.Thread.run(Thread.java:748)
> {code}
>  



--
This message was sent by Atlassian Jira
(v8.3.4#803005)

Reply via email to