See <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/6587/display/redirect>
Changes:
------------------------------------------
[...truncated 520.36 KB...]
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
    at org.apache.hadoop.ipc.Client.call(Client.java:1506)
    at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Jan 22, 2023 7:28:56 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-01-22T19:28:54.462Z: java.lang.IllegalArgumentException: Self-suppression not permitted
    at java.lang.Throwable.addSuppressed(Throwable.java:1043)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
    at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:613)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1674415437101/.temp-beam/9a62158b55934e3b-7c8f-4654-a872-7d9ff1a8b23c could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
    at org.apache.hadoop.ipc.Client.call(Client.java:1506)
    at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Jan 22, 2023 7:28:56 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-01-22T19:28:54.688Z: java.lang.IllegalArgumentException: Self-suppression not permitted
    at java.lang.Throwable.addSuppressed(Throwable.java:1043)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
    at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:613)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1674415437101/.temp-beam/5d1450f3f04eadb2-e5db-4022-babe-963ba1a7824a could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
    at org.apache.hadoop.ipc.Client.call(Client.java:1506)
    at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Jan 22, 2023 7:28:59 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-01-22T19:28:57.988Z: java.lang.IllegalArgumentException: Self-suppression not permitted
    at java.lang.Throwable.addSuppressed(Throwable.java:1043)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.closeChannelAndThrow(FileBasedSink.java:1036)
    at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1065)
    at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.finishBundle(WriteFiles.java:613)
Caused by: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /TEXTIO_IT__1674415437101/.temp-beam/48c3d724d5430c06-682f-4ff9-bd95-0c879a81f816 could only be written to 0 of the 1 minReplication nodes. There are 0 datanode(s) running and 0 node(s) are excluded in this operation.
    at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:2315)
    at org.apache.hadoop.hdfs.server.namenode.FSDirWriteFileOp.chooseTargetForNewBlock(FSDirWriteFileOp.java:294)
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:2960)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:904)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:593)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:604)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:572)
    at org.apache.hadoop.ipc.ProtobufRpcEngine2$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine2.java:556)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1093)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:1043)
    at org.apache.hadoop.ipc.Server$RpcCall.run(Server.java:971)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1878)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2976)
    at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
    at org.apache.hadoop.ipc.Client.call(Client.java:1506)
    at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
    at com.sun.proxy.$Proxy111.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
    at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
    at com.sun.proxy.$Proxy112.addBlock(Unknown Source)
    at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
    at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
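[Editor's note] The root cause repeated across the SEVERE entries above is on the HDFS side: the NameNode reports "0 datanode(s) running", so every addBlock call from DataStreamer fails, and the Beam writer then surfaces it as the misleading "Self-suppression not permitted" wrapper. A minimal sketch of a liveness check against the test cluster, assuming Hadoop's DistributedFileSystem API; the hdfs://namenode:8020 address is a placeholder, not taken from this log:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

    public class LiveDatanodeCheck {
      public static void main(String[] args) throws Exception {
        // Placeholder NameNode URI; substitute the address the IT actually uses.
        URI nameNode = URI.create("hdfs://namenode:8020");
        try (FileSystem fs = FileSystem.get(nameNode, new Configuration())) {
          // getDataNodeStats() asks the NameNode for its datanode report.
          DatanodeInfo[] nodes = ((DistributedFileSystem) fs).getDataNodeStats();
          // The "could only be written to 0 of the 1 minReplication nodes"
          // message above implies this would print 0 for the failing cluster.
          System.out.println("Datanodes reported by the NameNode: " + nodes.length);
        }
      }
    }

If this prints 0, the failure is environmental (the HDFS datanodes never came up), not a Beam regression.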
Jan 22, 2023 7:28:59 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-01-22T19:28:58.046Z: Finished operation Generate sequence/Read(BoundedCountingSource)+Produce text lines+Produce Avro records+Gather write start times+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write Parquet files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write
Jan 22, 2023 7:28:59 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2023-01-22T19:28:58.112Z: Workflow failed. Causes: S03:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Produce Avro records+Gather write start times+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write Parquet files/WriteFiles/GatherTempFileResults/Add void key/AddKeys/Map+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/Window.Into()/Window.Assign+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Reify+Write Parquet files/WriteFiles/GatherTempFileResults/Reshuffle/GroupByKey/Write+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write Parquet files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. If the logs only contain generic timeout errors related to accessing external resources, such as MongoDB, verify that the worker service account has permission to access the resource's subnetwork. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
  parquetioit0writethenread-01221124-0hq5-harness-41rb
      Root cause: Work item failed.,
  parquetioit0writethenread-01221124-0hq5-harness-41rb
      Root cause: Work item failed.,
  parquetioit0writethenread-01221124-0hq5-harness-x6qx
      Root cause: Work item failed.,
  parquetioit0writethenread-01221124-0hq5-harness-x6qx
      Root cause: Work item failed.
Jan 22, 2023 7:28:59 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-01-22T19:28:58.170Z: Cleaning up.
Jan 22, 2023 7:28:59 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-01-22T19:28:58.254Z: Stopping worker pool...
Jan 22, 2023 7:31:16 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-01-22T19:31:13.541Z: Autoscaling: Resized worker pool from 5 to 0.
Jan 22, 2023 7:31:16 PM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2023-01-22T19:31:13.580Z: Worker pool stopped.
Jan 22, 2023 7:31:51 PM org.apache.beam.runners.dataflow.DataflowPipelineJob logTerminalState
INFO: Job 2023-01-22_11_24_10-1107344009654948501 failed with status FAILED.
Jan 22, 2023 7:31:52 PM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.674415668436E9 to -1.0.
Jan 22, 2023 7:31:52 PM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.674415668436E9 to -1.0.
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll STANDARD_OUT
    Load test results for test (ID): 979055c5-401e-48bc-95da-fff874ccb080 and timestamp: 2023-01-22T19:31:52.039000000Z:
         Metric:          Value:
      write_time            -1.0
        run_time            -1.0
    dataset_size       1.08737E9
       read_time             0.0
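[Editor's note] The -1.0 values for write_time and run_time line up with the two NamedTestResult warnings above: -1.674415668436E9 is the negation of an epoch-seconds timestamp (roughly 2023-01-22T19:27:48Z, which falls inside this run), which is what an "end minus start" duration yields when the job fails before an end time is ever recorded. A sketch of that arithmetic under that assumption; the variable names are illustrative, not Beam's:

    public class NegativeMetricSketch {
      public static void main(String[] args) {
        // Assumption: start was captured in epoch seconds, but the job
        // FAILED before the end timestamp was set.
        double startSeconds = 1.674415668436E9; // ~2023-01-22T19:27:48Z
        double endSeconds = 0.0;                // never recorded
        double duration = endSeconds - startSeconds;
        System.out.println(duration); // -1.674415668436E9, the WARNING's value
        // NamedTestResult then resets the invalid negative duration to the
        // -1.0 sentinel shown in the metrics table above.
      }
    }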
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll STANDARD_ERROR
    ERROR StatusLogger Log4j2 could not find a logging implementation. Please add log4j-core to the classpath. Using SimpleLogger to log to the console...
Gradle Test Executor 1 finished executing tests.
> Task :sdks:java:io:file-based-io-tests:integrationTest FAILED
org.apache.beam.sdk.io.parquet.ParquetIOIT > writeThenReadAll FAILED
    java.lang.AssertionError: Values should be different. Actual: FAILED
        at org.junit.Assert.fail(Assert.java:89)
        at org.junit.Assert.failEquals(Assert.java:187)
        at org.junit.Assert.assertNotEquals(Assert.java:163)
        at org.junit.Assert.assertNotEquals(Assert.java:177)
        at org.apache.beam.sdk.io.parquet.ParquetIOIT.writeThenReadAll(ParquetIOIT.java:171)
1 test completed, 1 failed
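[Editor's note] The frames at ParquetIOIT.java:171 show a JUnit assertNotEquals on the pipeline's terminal state, and the default "Values should be different. Actual: FAILED" message means the Dataflow job's FAILED state was the unexpected value. A hedged reconstruction of that check; the surrounding method is an assumption, only the assertNotEquals frames come from this log:

    import static org.junit.Assert.assertNotEquals;

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.PipelineResult;

    class TerminalStateCheck {
      // Runs the pipeline and fails the test if Dataflow reports FAILED,
      // producing exactly the AssertionError seen above.
      static void assertNotFailed(Pipeline pipeline) {
        PipelineResult result = pipeline.run();
        PipelineResult.State state = result.waitUntilFinish();
        assertNotEquals(PipelineResult.State.FAILED, state);
      }
    }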
Finished generating test XML results (0.029 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.036 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>
:sdks:java:io:file-based-io-tests:integrationTest (Thread[included builds,5,main]) completed. Took 8 mins 0.368 secs.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':sdks:java:io:file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: <https://ci-beam.apache.org/job/beam_PerformanceTests_ParquetIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --debug option to get more log output.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.
See https://docs.gradle.org/7.5.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 8m 30s
154 actionable tasks: 92 executed, 60 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/tuv3lusrhtznc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]