See <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/6434/display/redirect?page=changes>
Changes:
[Moritz Mack] Annotate stateful VR test in TestStreamTest with UsesStatefulParDo
[Moritz Mack] Properly close Spark (streaming) context if Pipeline translation fails
------------------------------------------
[...truncated 745.07 KB...]
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:185)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:115)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
at org.apache.beam.sdk.testutils.metrics.TimeMonitor.processElement(TimeMonitor.java:42)
at org.apache.beam.sdk.testutils.metrics.TimeMonitor$DoFnInvoker.invokeProcessElement(Unknown Source)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:49)
at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.util.common.****.ReadOperation.runReadLoop(ReadOperation.java:218)
at org.apache.beam.runners.dataflow.****.util.common.****.ReadOperation.start(ReadOperation.java:169)
at org.apache.beam.runners.dataflow.****.util.common.****.MapTaskExecutor.execute(MapTaskExecutor.java:83)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:420)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.doWork(BatchDataflowWorker.java:389)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:314)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:140)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:120)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:107)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
[CIRCULAR REFERENCE:org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-2a89c367-ee20-4cfe-b6b4-4378b39f84ff/2f1cc6b1be698014-1bc3-455b-bec1-5fc0a41655fd could only be replicated to 0 nodes instead of minReplication (=1). There are 2 datanode(s) running and 2 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
]
Sep 13, 2022 9:31:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2022-09-13T09:31:29.109Z: org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-2a89c367-ee20-4cfe-b6b4-4378b39f84ff/714b7731d8fff831-992f-48eb-a1f1-64f5980b1086 could only be replicated to 0 nodes instead of minReplication (=1). There are 2 datanode(s) running and 2 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1560)
at org.apache.hadoop.ipc.Client.call(Client.java:1506)
at org.apache.hadoop.ipc.Client.call(Client.java:1403)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:118)
at com.sun.proxy.$Proxy107.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:448)
at sun.reflect.GeneratedMethodAccessor69.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:433)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:166)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:158)
at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:96)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:362)
at com.sun.proxy.$Proxy108.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DataStreamer.locateFollowingBlock(DataStreamer.java:1846)
at org.apache.hadoop.hdfs.DataStreamer.nextBlockOutputStream(DataStreamer.java:1645)
at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:710)
Suppressed: java.io.IOException: Failed closing channel to hdfs://34.170.79.19:9000/.temp-beam-2a89c367-ee20-4cfe-b6b4-4378b39f84ff/714b7731d8fff831-992f-48eb-a1f1-64f5980b1086
at org.apache.beam.sdk.io.FileBasedSink$Writer.close(FileBasedSink.java:1076)
at org.apache.beam.sdk.io.WriteFiles.writeOrClose(WriteFiles.java:634)
at org.apache.beam.sdk.io.WriteFiles.access$1100(WriteFiles.java:125)
at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn.processElement(WriteFiles.java:602)
at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesFn$DoFnInvoker.invokeProcessElement(Unknown Source)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:185)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.AssignWindowsParDoFnFactory$AssignWindowsParDoFn.processElement(AssignWindowsParDoFnFactory.java:115)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
at org.apache.beam.sdk.testutils.metrics.TimeMonitor.processElement(TimeMonitor.java:42)
at org.apache.beam.sdk.testutils.metrics.TimeMonitor$DoFnInvoker.invokeProcessElement(Unknown Source)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn$1.output(SimpleParDoFn.java:285)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.outputWindowedValue(SimpleDoFnRunner.java:275)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.access$900(SimpleDoFnRunner.java:85)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:423)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner$DoFnProcessContext.output(SimpleDoFnRunner.java:411)
at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn.processElement(FileBasedIOITHelper.java:49)
at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:211)
at org.apache.beam.runners.dataflow.****.repackaged.org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:188)
at org.apache.beam.runners.dataflow.****.SimpleParDoFn.processElement(SimpleParDoFn.java:340)
at org.apache.beam.runners.dataflow.****.util.common.****.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.****.util.common.****.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.****.util.common.****.ReadOperation.runReadLoop(ReadOperation.java:218)
at org.apache.beam.runners.dataflow.****.util.common.****.ReadOperation.start(ReadOperation.java:169)
at org.apache.beam.runners.dataflow.****.util.common.****.MapTaskExecutor.execute(MapTaskExecutor.java:83)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:420)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.doWork(BatchDataflowWorker.java:389)
at org.apache.beam.runners.dataflow.****.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:314)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:140)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:120)
at org.apache.beam.runners.dataflow.****.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:107)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
[CIRCULAR REFERENCE:org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /.temp-beam-2a89c367-ee20-4cfe-b6b4-4378b39f84ff/714b7731d8fff831-992f-48eb-a1f1-64f5980b1086 could only be replicated to 0 nodes instead of minReplication (=1). There are 2 datanode(s) running and 2 node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1550)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getNewBlockTargets(FSNamesystem.java:3110)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3034)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:723)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:492)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
]
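
The RemoteException above is the HDFS side of the failure: the namenode could not place the temp-file block on any datanode (0 of minReplication=1), because both running datanodes were excluded from the write. If this needs manual triage against the same cluster, a minimal write probe along the following lines can confirm whether any datanode accepts a block at all. This is a sketch, not part of the test code: the class name and probe path are made up, and the namenode URI is copied from the "Failed closing channel" message above, so adjust both as needed.

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HdfsWriteProbe {
      public static void main(String[] args) throws Exception {
        // Namenode address taken from the suppressed exception above; adjust for your cluster.
        URI namenode = URI.create("hdfs://34.170.79.19:9000");
        try (FileSystem fs = FileSystem.get(namenode, new Configuration())) {
          Path probe = new Path("/.tmp-hdfs-write-probe");
          // Ask for a single replica, mirroring the minReplication (=1) in the error above.
          try (FSDataOutputStream out =
              fs.create(probe, true, 4096, (short) 1, fs.getDefaultBlockSize(probe))) {
            out.writeBytes("probe");
          }
          // Reaching this point means at least one datanode accepted the block.
          System.out.println("HDFS write probe succeeded");
          fs.delete(probe, false);
        }
      }
    }
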
Sep 13, 2022 9:31:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2022-09-13T09:31:29.611Z: Finished operation Generate sequence/Read(BoundedCountingSource)+Produce text lines+Collect write start time+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write
Sep 13, 2022 9:31:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2022-09-13T09:31:29.736Z: Workflow failed. Causes: S02:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Collect write start time+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these ****s:
textioit0writethenreadall-09130223-b6rs-harness-j5dg
Root cause: Work item failed.,
textioit0writethenreadall-09130223-b6rs-harness-2t1h
Root cause: Work item failed.,
textioit0writethenreadall-09130223-b6rs-harness-tb83
Root cause: Work item failed.,
textioit0writethenreadall-09130223-b6rs-harness-j5dg
Root cause: Work item failed.
Sep 13, 2022 9:31:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2022-09-13T09:31:29.865Z: Cleaning up.
Sep 13, 2022 9:31:31 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2022-09-13T09:31:30.038Z: Stopping **** pool...
Sep 13, 2022 9:33:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2022-09-13T09:33:44.567Z: Autoscaling: Resized **** pool from 5 to 0.
Sep 13, 2022 9:33:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2022-09-13T09:33:44.638Z: Worker pool stopped.
Sep 13, 2022 9:34:20 AM org.apache.beam.runners.dataflow.DataflowPipelineJob logTerminalState
INFO: Job 2022-09-13_02_23_55-5441299190629063359 failed with status FAILED.
Sep 13, 2022 9:34:20 AM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.663061159377E9 to -1.0.
Sep 13, 2022 9:34:20 AM org.apache.beam.sdk.testutils.NamedTestResult create
WARNING: Reset invalid NamedTestResult value -1.663061159377E9 to -1.0.
org.apache.beam.sdk.io.text.TextIOIT > writeThenReadAll STANDARD_OUT
Load test results for test (ID): bcd74442-627b-434b-ace1-a4a189dce1a6 and timestamp: 2022-09-13T09:34:20.642000000Z:
Metric:        Value:
dataset_size   1.09784E9
read_time      0.0
write_time     -1.0
run_time       -1.0
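
The -1.0 values for write_time and run_time line up with the two "Reset invalid NamedTestResult value -1.663061159377E9 to -1.0" warnings above: the write never completed, so only the start timestamp was collected and the computed duration comes out as a large negative number (roughly minus the start time in epoch seconds), which the test utilities reset to a -1.0 sentinel. A rough illustration of that arithmetic, using assumed variable names rather than the actual Beam metrics code:

    public class MetricResetIllustration {
      public static void main(String[] args) {
        // Assumed values for illustration: the start timestamp was recorded, the end was not.
        double startSeconds = 1.663061159377E9; // epoch seconds, i.e. 2022-09-13, during this run
        double endSeconds = 0.0;                // missing because the pipeline FAILED
        double elapsed = endSeconds - startSeconds;     // -1.663061159377E9, the "invalid" value
        double reported = elapsed < 0 ? -1.0 : elapsed; // reset to the -1.0 sentinel seen above
        System.out.println(elapsed + " -> " + reported);
      }
    }
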
Gradle Test Executor 8 finished executing tests.
> Task :sdks:java:io:file-based-io-tests:integrationTest FAILED
org.apache.beam.sdk.io.text.TextIOIT > writeThenReadAll FAILED
java.lang.AssertionError: Values should be different. Actual: FAILED
at org.junit.Assert.fail(Assert.java:89)
at org.junit.Assert.failEquals(Assert.java:187)
at org.junit.Assert.assertNotEquals(Assert.java:163)
at org.junit.Assert.assertNotEquals(Assert.java:177)
at org.apache.beam.sdk.io.text.TextIOIT.writeThenReadAll(TextIOIT.java:158)
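
The AssertionError message "Values should be different. Actual: FAILED" is JUnit's assertNotEquals failure text, so the value observed at TextIOIT.java:158 was FAILED, presumably the pipeline's terminal state. A sketch of that style of check follows; it assumes (rather than quotes) the shape of the TextIOIT assertion, and the class and method names here are made up:

    import static org.junit.Assert.assertNotEquals;

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.PipelineResult;

    public class PipelineStateAssertion {
      // Illustrates the kind of check that fails with
      // "Values should be different. Actual: FAILED" when a job ends in the FAILED state.
      static PipelineResult runAndCheck(Pipeline pipeline) {
        PipelineResult result = pipeline.run();
        PipelineResult.State state = result.waitUntilFinish();
        assertNotEquals(PipelineResult.State.FAILED, state);
        return result;
      }
    }
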
1 test completed, 1 failed
Finished generating test XML results (0.042 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.048 secs) into: <https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>
:sdks:java:io:file-based-io-tests:integrationTest (Thread[included builds,5,main]) completed. Took 10 mins 42.867 secs.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':sdks:java:io:file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: file://<https://ci-beam.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --debug option to get more log output.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings and determine if they come from your own scripts or plugins.
See https://docs.gradle.org/7.5.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 13m 10s
153 actionable tasks: 105 executed, 46 from cache, 2 up-to-date
Publishing build scan...
https://gradle.com/s/e6l3i5aylau2s
Stopped 7 **** daemon(s).
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure