See
<https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/6776/display/redirect>
Changes:
------------------------------------------
[...truncated 539.22 KB...]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Apr 28, 2023 7:51:55 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-04-28T07:51:54.203Z: java.lang.IllegalArgumentException: Self-suppression not permitted
at java.lang.Throwable.addSuppressed(Throwable.java:1072)
at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:631)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1682667933638/.temp-beam/46c27a5eef7df89d-2513-43e0-bf85-e37d1961f337 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
at org.apache.hadoop.ipc.Client.call(Client.java:1506)
at org.apache.hadoop.ipc.Client.call(Client.java:1403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Apr 28, 2023 7:51:55 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-04-28T07:51:54.323Z: java.lang.IllegalArgumentException: Self-suppression not permitted
at java.lang.Throwable.addSuppressed(Throwable.java:1072)
at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:631)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1682667933638/.temp-beam/625383c63121489b-5c9a-4d97-979f-01913cb5a2df could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
at org.apache.hadoop.ipc.Client.call(Client.java:1506)
at org.apache.hadoop.ipc.Client.call(Client.java:1403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Apr 28, 2023 7:52:07 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-04-28T07:52:06.896Z: java.lang.IllegalArgumentException: Self-suppression not permitted
at java.lang.Throwable.addSuppressed(Throwable.java:1072)
at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:631)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1682667933638/.temp-beam/b56f8a70ca0194be-a0cf-4b64-b570-9c1f5d307ac3 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
at org.apache.hadoop.ipc.Client.call(Client.java:1506)
at org.apache.hadoop.ipc.Client.call(Client.java:1403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Apr 28, 2023 7:52:07 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-04-28T07:52:06.955Z: Finished operation Generate sequence/Read(BoundedCountingSource)+Produce text lines+Produce Avro records+Gather write start times+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write Parquet files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write
Apr 28, 2023 7:52:07 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-04-28T07:52:07.063Z: Workflow failed. Causes: S03:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Produce Avro records+Gather write start times+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write Parquet files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. If the logs only contain generic timeout errors related to accessing external resources, such as MongoDB, verify that the **** service account has permission to access the resource's subnetwork. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these ****s:
Root cause: Work item failed.
Worker ID: parquetioit0writethenread-04280045-z0ky-harness-fjt8,
Root cause: Work item failed.
Worker ID: parquetioit0writethenread-04280045-z0ky-harness-fjt8,
Root cause: Work item failed.
Worker ID: parquetioit0writethenread-04280045-z0ky-harness-fjt8,
Root cause: Work item failed.
Worker ID: parquetioit0writethenread-04280045-z0ky-harness-fjt8
Apr 28, 2023 7:52:07 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-04-28T07:52:07.141Z: Cleaning up.
Apr 28, 2023 7:52:07 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-04-28T07:52:07.240Z: Stopping **** pool...
Apr 28, 2023 7:54:21 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-04-28T07:54:19.715Z: Autoscaling: Resized **** pool from 5 to 0.
Apr 28, 2023 7:54:21 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-04-28T07:54:19.779Z: Worker pool stopped.
Apr 28, 2023 7:54:56 AM org.apache.beam.runners.dataflow.DataflowPipelineJob logTerminalState
INFO: Job 2023-04-28_00_45_50-15699028482457585577 failed with status FAILED.
Apr 28, 2023 7:54:56 AM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.682668296961E9 to -1.0.
Apr 28, 2023 7:54:56 AM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.682668296961E9 to -1.0.
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll STANDARD_OUT
Load test results for test (ID): 3219d3cb-1814-4d10-afbf-272d05044212 and timestamp: 2023-04-28T07:54:56.318000000Z:
Metric:            Value:
write_time           -1.0
dataset_size    1.08737E9
read_time             0.0
run_time             -1.0
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll STANDARD_ERROR
ERROR StatusLogger Log4j2 could not find a logging implementation. Please add log4j-core to the classpath. Using SimpleLogger to log to the console...
Gradle Test Executor 1 finished executing tests.
> Task :sdks:java:io:file-based-io-tests:integrationTest FAILED
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll FAILED
java.lang.AssertionError: Values should be different. Actual: FAILED
at org.junit.Assert.fail(Assert.java:89)
at org.junit.Assert.failEquals(Assert.java:187)
at org.junit.Assert.assertNotEquals(Assert.java:163)
at org.junit.Assert.assertNotEquals(Assert.java:177)
at org.apache.beam.sdk.io.parquet.ParquetIOIT.writeThenReadAll(ParquetIOIT.java:171)
1 test completed, 1 failed
Finished generating test XML results (0.03 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.04 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>
:sdks:java:io:file-based-io-tests:integrationTest (Thread[Execution **** Thread 6,5,main]) completed. Took 9 mins 29.132 secs.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':sdks:java:io:file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: file://<https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --debug option to get more log output.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.
See https://docs.gradle.org/7.5.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 10m 8s
160 actionable tasks: 96 executed, 62 from cache, 2 up-to-date
Build scan background action failed.
java.lang.IllegalArgumentException: com.gradle.enterprise.gradleplugin.internal.extension.a is not an interface
at java.lang.reflect.Proxy$ProxyClassFactory.apply(Proxy.java:590)
at java.lang.reflect.Proxy$ProxyClassFactory.apply(Proxy.java:557)
at java.lang.reflect.WeakCache$Factory.get(WeakCache.java:230)
at java.lang.reflect.WeakCache.get(WeakCache.java:127)
at java.lang.reflect.Proxy.getProxyClass0(Proxy.java:419)
at java.lang.reflect.Proxy.newProxyInstance(Proxy.java:719)
at com.gradle.ProxyFactory$ProxyingInvocationHandler.createLocalProxy(ProxyFactory.java:64)
at com.gradle.ProxyFactory$ProxyingInvocationHandler.lambda$adaptActionArg$1(ProxyFactory.java:59)
at com.gradle.enterprise.gradleplugin.internal.extension.b$3.run(SourceFile:100)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:750)
Publishing build scan...
https://gradle.com/s/xev7zirjbmeng
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]