See <https://ci-beam.apache.org/job/beam_LoadTests_Java_Combine_SparkStructuredStreaming_Batch/1034/display/redirect?page=changes>
Changes:

[noreply] [#24789][prism] Add testdofns for later (#25557)
[noreply] [#24789][prism] Minimum required jobservices (#25556)
[noreply] [#24789][prism] Handlers for combine, ParDo, GBK, Flatten (#25558)
[noreply] [prism] Tiny lint fixes (#25564)
[noreply] minor error handling fixes (#25555)
[noreply] [prism] Add in element manager (#25565)

------------------------------------------
[...truncated 58.53 KB...]
> Task :model:job-management:processResources UP-TO-DATE
> Task :model:fn-execution:processResources UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:createCheckerFrameworkManifest UP-TO-DATE
> Task :runners:spark:3:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:processResources NO-SOURCE
> Task :sdks:java:extensions:protobuf:createCheckerFrameworkManifest UP-TO-DATE
> Task :runners:spark:3:processResources UP-TO-DATE
> Task :sdks:java:extensions:protobuf:extractProto UP-TO-DATE
> Task :sdks:java:expansion-service:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:io:kafka:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:io:kinesis:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:extensions:arrow:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:expansion-service:processResources NO-SOURCE
> Task :sdks:java:extensions:protobuf:processResources NO-SOURCE
> Task :sdks:java:io:google-cloud-platform:processResources NO-SOURCE
> Task :sdks:java:io:kafka:processResources NO-SOURCE
> Task :sdks:java:io:kinesis:processResources NO-SOURCE
> Task :sdks:java:extensions:arrow:processResources NO-SOURCE
> Task :sdks:java:core:generateGrammarSource UP-TO-DATE
> Task :sdks:java:testing:test-utils:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:testing:load-tests:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:io:synthetic:createCheckerFrameworkManifest UP-TO-DATE
> Task :sdks:java:core:processResources UP-TO-DATE
> Task :sdks:java:testing:test-utils:processResources NO-SOURCE
> Task :sdks:java:testing:load-tests:processResources NO-SOURCE
> Task :sdks:java:io:synthetic:processResources NO-SOURCE
> Task :model:pipeline:extractIncludeProto UP-TO-DATE
> Task :model:pipeline:extractProto UP-TO-DATE
> Task :model:pipeline:generateProto UP-TO-DATE
> Task :model:pipeline:compileJava UP-TO-DATE
> Task :model:pipeline:processResources UP-TO-DATE
> Task :model:pipeline:classes UP-TO-DATE
> Task :model:pipeline:jar UP-TO-DATE
> Task :model:fn-execution:extractIncludeProto UP-TO-DATE
> Task :model:job-management:extractIncludeProto UP-TO-DATE
> Task :model:pipeline:shadowJar UP-TO-DATE
> Task :model:job-management:generateProto UP-TO-DATE
> Task :model:fn-execution:generateProto UP-TO-DATE
> Task :model:job-management:compileJava UP-TO-DATE
> Task :model:job-management:classes UP-TO-DATE
> Task :model:fn-execution:compileJava UP-TO-DATE
> Task :model:fn-execution:classes UP-TO-DATE
> Task :model:job-management:shadowJar UP-TO-DATE
> Task :model:fn-execution:shadowJar UP-TO-DATE
> Task :sdks:java:core:compileJava UP-TO-DATE
> Task :sdks:java:core:classes UP-TO-DATE
> Task :sdks:java:core:shadowJar UP-TO-DATE
> Task :sdks:java:extensions:protobuf:extractIncludeProto UP-TO-DATE
> Task :sdks:java:extensions:protobuf:generateProto NO-SOURCE
> Task :sdks:java:extensions:arrow:compileJava UP-TO-DATE
> Task :sdks:java:io:synthetic:compileJava UP-TO-DATE
> Task :sdks:java:extensions:arrow:classes UP-TO-DATE
> Task :sdks:java:io:synthetic:classes UP-TO-DATE
> Task :sdks:java:extensions:arrow:jar UP-TO-DATE
> Task :sdks:java:io:synthetic:jar UP-TO-DATE
> Task :sdks:java:fn-execution:compileJava UP-TO-DATE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:compileJava UP-TO-DATE
> Task :sdks:java:fn-execution:jar UP-TO-DATE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :sdks:java:io:kinesis:compileJava UP-TO-DATE
> Task :sdks:java:io:kinesis:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar UP-TO-DATE
> Task :sdks:java:io:kinesis:jar UP-TO-DATE
> Task :sdks:java:testing:test-utils:compileJava UP-TO-DATE
> Task :sdks:java:testing:test-utils:classes UP-TO-DATE
> Task :sdks:java:testing:test-utils:jar UP-TO-DATE
> Task :runners:core-construction-java:compileJava UP-TO-DATE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :runners:core-construction-java:jar UP-TO-DATE
> Task :runners:core-java:compileJava UP-TO-DATE
> Task :runners:core-java:classes UP-TO-DATE
> Task :runners:core-java:jar UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:jar UP-TO-DATE
> Task :sdks:java:harness:compileJava UP-TO-DATE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar UP-TO-DATE
> Task :runners:java-fn-execution:compileJava UP-TO-DATE
> Task :runners:java-fn-execution:classes UP-TO-DATE
> Task :runners:java-fn-execution:jar UP-TO-DATE
> Task :sdks:java:expansion-service:compileJava UP-TO-DATE
> Task :sdks:java:expansion-service:classes UP-TO-DATE
> Task :sdks:java:expansion-service:jar UP-TO-DATE
> Task :runners:java-job-service:compileJava UP-TO-DATE
> Task :runners:java-job-service:classes UP-TO-DATE
> Task :runners:java-job-service:jar UP-TO-DATE
> Task :sdks:java:io:kafka:compileJava UP-TO-DATE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :sdks:java:io:kafka:jar UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:compileJava UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:jar UP-TO-DATE
> Task :sdks:java:testing:load-tests:compileJava UP-TO-DATE
> Task :sdks:java:testing:load-tests:classes UP-TO-DATE
> Task :sdks:java:testing:load-tests:jar UP-TO-DATE
> Task :runners:spark:3:compileJava UP-TO-DATE
> Task :runners:spark:3:classes UP-TO-DATE
> Task :runners:spark:3:jar UP-TO-DATE

> Task :sdks:java:testing:load-tests:run
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/home/jenkins/.gradle/caches/modules-2/files-2.1/org.slf4j/slf4j-log4j12/1.7.30/c21f55139d8141d2231214fb1feaf50a1edca95e/slf4j-log4j12-1.7.30.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/home/jenkins/.gradle/caches/modules-2/files-2.1/org.slf4j/slf4j-reload4j/1.7.36/db708f7d959dee1857ac524636e85ecf2e1781c1/slf4j-reload4j-1.7.36.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
23/02/21 12:57:54 WARN org.apache.beam.sdk.Pipeline: The following transforms do not have stable unique names: Collect end time metric
23/02/21 12:57:54 INFO org.apache.beam.runners.spark.structuredstreaming.SparkStructuredStreamingRunner: *** SparkStructuredStreamingRunner is based on spark structured streaming framework and is no more based on RDD/DStream API. See https://spark.apache.org/docs/latest/structured-streaming-programming-guide.html It is still experimental, its coverage of the Beam model is partial. ***
23/02/21 12:57:54 INFO org.apache.beam.runners.spark.structuredstreaming.translation.SparkSessionFactory: Configured `spark.serializer` to use KryoSerializer [unsafe=true]
23/02/21 12:57:55 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
23/02/21 12:57:56 INFO org.sparkproject.jetty.util.log: Logging initialized @5013ms to org.sparkproject.jetty.util.log.Slf4jLog
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.Server: jetty-9.4.40.v20210413; built: 2021-04-13T20:42:42.668Z; git: b881a572662e1943a14ae12e7e1207989f218b74; jvm 1.8.0_352-8u352-ga-1~20.04-b08
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.Server: Started @5112ms
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.AbstractConnector: Started ServerConnector@526a7a45{HTTP/1.1, (http/1.1)}{127.0.0.1:4040}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@51c8f62c{/jobs,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7ff19c33{/jobs/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7cca01a8{/jobs/job,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7a4d582c{/jobs/job/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@45e9b12d{/stages,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@2d55e826{/stages/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@45f756e6{/stages/stage,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7d247660{/stages/stage/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@49d30c6f{/stages/pool,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4fdca00a{/stages/pool/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5a8c93{/storage,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@119b0892{/storage/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4ed4a7e4{/storage/rdd,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@55651434{/storage/rdd/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@50448409{/environment,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@2b0dc227{/environment/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@43bdaa1b{/executors,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@75e09567{/executors/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@470d183{/executors/threadDump,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@ea52184{/executors/threadDump/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@3c854752{/static,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@619f2afc{/,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4db60246{/api,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@34d45ec0{/jobs/job/kill,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@727320fa{/stages/stage/kill,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@46911148{/metrics/json,null,AVAILABLE,@Spark}
23/02/21 12:57:56 INFO org.apache.beam.runners.spark.structuredstreaming.metrics.MetricsAccumulator: Instantiated metrics accumulator: MetricQueryResults()
23/02/21 12:57:57 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Read input
23/02/21 12:57:59 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@28ebc300{/SQL,null,AVAILABLE,@Spark}
23/02/21 12:57:59 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4b57885c{/SQL/json,null,AVAILABLE,@Spark}
23/02/21 12:57:59 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@3b243c07{/SQL/execution,null,AVAILABLE,@Spark}
23/02/21 12:57:59 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@54004ae1{/SQL/execution/json,null,AVAILABLE,@Spark}
23/02/21 12:57:59 INFO org.sparkproject.jetty.server.handler.ContextHandler: Started o.s.j.s.ServletContextHandler@16159e60{/static/sql,null,AVAILABLE,@Spark}
23/02/21 12:58:00 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect start time metric/ParMultiDo(TimeMonitor)
23/02/21 12:58:00 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect metrics/ParMultiDo(ByteMonitor)
23/02/21 12:58:00 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Window.Into()/Window.Assign
23/02/21 12:58:00 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Dataset Window.Into()/Window.Assign.out will be cached in-memory as RDD for reuse.
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Convert to Long: 0/Map/ParMultiDo(Anonymous)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating composite: Combine: 0
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect end time metric/ParMultiDo(TimeMonitor)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Convert to Long: 1/Map/ParMultiDo(Anonymous)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating composite: Combine: 1
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect end time metric2/ParMultiDo(TimeMonitor)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Convert to Long: 2/Map/ParMultiDo(Anonymous)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating composite: Combine: 2
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect end time metric3/ParMultiDo(TimeMonitor)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Convert to Long: 3/Map/ParMultiDo(Anonymous)
23/02/21 12:58:01 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating composite: Combine: 3
23/02/21 12:58:02 INFO org.apache.beam.runners.spark.structuredstreaming.translation.PipelineTranslator: Translating primitive: Collect end time metric4/ParMultiDo(TimeMonitor)
23/02/21 12:58:22 INFO org.apache.beam.runners.spark.structuredstreaming.translation.EvaluationContext: Evaluated dataset Collect end time metric2/ParMultiDo(TimeMonitor).output in 20.6 s
23/02/21 12:58:34 INFO org.apache.beam.runners.spark.structuredstreaming.translation.EvaluationContext: Evaluated dataset Collect end time metric4/ParMultiDo(TimeMonitor).output in 11.6 s
23/02/21 12:58:45 INFO org.apache.beam.runners.spark.structuredstreaming.translation.EvaluationContext: Evaluated dataset Collect end time metric3/ParMultiDo(TimeMonitor).output in 11.1 s
23/02/21 12:58:56 INFO org.apache.beam.runners.spark.structuredstreaming.translation.EvaluationContext: Evaluated dataset Collect end time metric/ParMultiDo(TimeMonitor).output in 11.0 s
23/02/21 12:58:56 INFO org.sparkproject.jetty.server.AbstractConnector: Stopped Spark@526a7a45{HTTP/1.1, (http/1.1)}{127.0.0.1:4040}

Load test results for test (ID): 07997db9-300e-44b7-b6b1-b1a1a437c05e and timestamp: 2023-02-21T12:57:54.240000000Z:

Metric:                                        Value:
sparkstructuredstreaming_runtime_sec           51.79
sparkstructuredstreaming_total_bytes_count     5.0E8

Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.

See https://docs.gradle.org/7.5.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD SUCCESSFUL in 1m 24s
92 actionable tasks: 1 executed, 91 up-to-date

Publishing build scan...
https://gradle.com/s/r2i6zksaoqrv2

FATAL: command execution failed
java.io.IOException: Remote call on apache-beam-jenkins-10 failed
    at hudson.remoting.Channel.call(Channel.java:1004)
    at hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:285)
    at com.sun.proxy.$Proxy143.isAlive(Unknown Source)
    at hudson.Launcher$RemoteLauncher$ProcImpl.isAlive(Launcher.java:1215)
    at hudson.Launcher$RemoteLauncher$ProcImpl.join(Launcher.java:1207)
    at hudson.Launcher$ProcStarter.join(Launcher.java:524)
    at hudson.plugins.gradle.Gradle.perform(Gradle.java:317)
    at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
    at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:814)
    at hudson.model.Build$BuildExecution.build(Build.java:199)
    at hudson.model.Build$BuildExecution.doRun(Build.java:164)
    at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:522)
    at hudson.model.Run.execute(Run.java:1896)
    at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:44)
    at hudson.model.ResourceController.execute(ResourceController.java:101)
    at hudson.model.Executor.run(Executor.java:442)
Caused by: java.lang.OutOfMemoryError: unable to create native thread: possibly out of memory or process/resource limits reached
    at java.base/java.lang.Thread.start0(Native Method)
    at java.base/java.lang.Thread.start(Thread.java:798)
    at java.base/java.util.concurrent.ThreadPoolExecutor.addWorker(ThreadPoolExecutor.java:937)
    at java.base/java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1354)
    at java.base/java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:140)
    at jenkins.util.InterceptingExecutorService.submit(InterceptingExecutorService.java:36)
    at jenkins.util.InterceptingExecutorService.submit(InterceptingExecutorService.java:36)
    at hudson.remoting.DelegatingExecutorService.submit(DelegatingExecutorService.java:52)
    at hudson.remoting.InterceptingExecutorService.submit(InterceptingExecutorService.java:50)
    at hudson.remoting.InterceptingExecutorService.submit(InterceptingExecutorService.java:44)
    at org.jenkinsci.remoting.util.AnonymousClassWarnings.check(AnonymousClassWarnings.java:66)
    at hudson.remoting.MultiClassLoaderSerializer$Output.annotateClass(MultiClassLoaderSerializer.java:48)
    at java.base/java.io.ObjectOutputStream.writeNonProxyDesc(ObjectOutputStream.java:1281)
    at java.base/java.io.ObjectOutputStream.writeClassDesc(ObjectOutputStream.java:1222)
    at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1418)
    at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1169)
    at java.base/java.io.ObjectOutputStream.writeArray(ObjectOutputStream.java:1369)
    at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1165)
    at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1543)
    at java.base/java.io.ObjectOutputStream.defaultWriteObject(ObjectOutputStream.java:438)
    at java.base/java.lang.Throwable.writeObject(Throwable.java:996)
    at java.base/jdk.internal.reflect.GeneratedMethodAccessor72.invoke(Unknown Source)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at java.base/java.io.ObjectStreamClass.invokeWriteObject(ObjectStreamClass.java:1016)
    at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1487)
    at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1423)
    at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1169)
    at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1543)
    at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1500)
    at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1423)
    at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1169)
    at java.base/java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:345)
    at hudson.remoting.UserRequest._serialize(UserRequest.java:263)
    at hudson.remoting.UserRequest.serialize(UserRequest.java:272)
    at hudson.remoting.UserRequest.<init>(UserRequest.java:101)
    at hudson.remoting.Channel.call(Channel.java:998)
    ... 15 more
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
