See
<https://builds.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/217/display/redirect>
Changes:
------------------------------------------
[...truncated 334.41 KB...]
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader$1.load(CachingShuffleBatchReader.java:51)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader.read(CachingShuffleBatchReader.java:74)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:125)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntriesIfNeeded(BatchingShuffleEntryReader.java:119)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.hasNext(BatchingShuffleEntryReader.java:84)
at org.apache.beam.runners.dataflow.worker.util.common.ForwardingReiterator.hasNext(ForwardingReiterator.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.advance(GroupingShuffleEntryIterator.java:271)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.hasNext(GroupingShuffleEntryIterator.java:263)
at org.apache.beam.runners.dataflow.worker.GroupingShuffleReader$GroupingShuffleReaderIterator$ValuesIterator.hasNext(GroupingShuffleReader.java:397)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:55)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:39)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.invokeProcessElement(GroupAlsoByWindowFnRunner.java:115)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.processElement(GroupAlsoByWindowFnRunner.java:73)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowsParDoFn.processElement(GroupAlsoByWindowsParDoFn.java:114)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:411)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:380)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:305)
Dec 06, 2019 8:00:29 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2019-12-06T08:00:28.245Z: An OutOfMemoryException occurred. Consider specifying higher memory instances in PipelineOptions.
java.lang.RuntimeException: org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ExecutionError: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:132)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntriesIfNeeded(BatchingShuffleEntryReader.java:119)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.hasNext(BatchingShuffleEntryReader.java:84)
at org.apache.beam.runners.dataflow.worker.util.common.ForwardingReiterator.hasNext(ForwardingReiterator.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.advance(GroupingShuffleEntryIterator.java:271)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.hasNext(GroupingShuffleEntryIterator.java:263)
at org.apache.beam.runners.dataflow.worker.GroupingShuffleReader$GroupingShuffleReaderIterator$ValuesIterator.hasNext(GroupingShuffleReader.java:397)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:55)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:39)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.invokeProcessElement(GroupAlsoByWindowFnRunner.java:115)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.processElement(GroupAlsoByWindowFnRunner.java:73)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowsParDoFn.processElement(GroupAlsoByWindowsParDoFn.java:114)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:411)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:380)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:305)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:140)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:120)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:107)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ExecutionError: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2048)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader.read(CachingShuffleBatchReader.java:74)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:125)
... 26 more
Caused by: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.getFixedLengthPrefixedByteArray(ChunkingShuffleBatchReader.java:98)
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.getShuffleEntry(ChunkingShuffleBatchReader.java:82)
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.read(ChunkingShuffleBatchReader.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader$1.load(CachingShuffleBatchReader.java:55)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader$1.load(CachingShuffleBatchReader.java:51)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader.read(CachingShuffleBatchReader.java:74)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:125)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntriesIfNeeded(BatchingShuffleEntryReader.java:119)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.hasNext(BatchingShuffleEntryReader.java:84)
at org.apache.beam.runners.dataflow.worker.util.common.ForwardingReiterator.hasNext(ForwardingReiterator.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.advance(GroupingShuffleEntryIterator.java:271)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.hasNext(GroupingShuffleEntryIterator.java:263)
at org.apache.beam.runners.dataflow.worker.GroupingShuffleReader$GroupingShuffleReaderIterator$ValuesIterator.hasNext(GroupingShuffleReader.java:397)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:55)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:39)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.invokeProcessElement(GroupAlsoByWindowFnRunner.java:115)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.processElement(GroupAlsoByWindowFnRunner.java:73)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowsParDoFn.processElement(GroupAlsoByWindowsParDoFn.java:114)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:411)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:380)
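Note on the "Consider specifying higher memory instances in PipelineOptions" hint above: it points at the Dataflow worker pool options. Below is a minimal, hedged sketch of how a job like this could request higher-memory workers; the machine type value is purely illustrative and the snippet is not taken from this test's actual configuration, only from the standard DataflowPipelineOptions interface.

    // Hedged sketch: request higher-memory Dataflow workers so shuffle reads have more heap.
    // The machine type shown here is an example value, not what this Jenkins job uses.
    import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;

    public class HighMemWorkerOptions {
      public static DataflowPipelineOptions fromArgs(String[] args) {
        DataflowPipelineOptions options =
            PipelineOptionsFactory.fromArgs(args).withValidation().as(DataflowPipelineOptions.class);
        // Same effect as passing --workerMachineType=n1-highmem-4 on the command line.
        options.setWorkerMachineType("n1-highmem-4");
        return options;
      }
    }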
Dec 06, 2019 8:05:45 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:05:45.028Z: Checking permissions granted to controller Service Account.
Dec 06, 2019 8:06:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2019-12-06T08:06:42.632Z: An OutOfMemoryException occurred. Consider specifying higher memory instances in PipelineOptions.
java.lang.RuntimeException: org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ExecutionError: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:132)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntriesIfNeeded(BatchingShuffleEntryReader.java:119)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.hasNext(BatchingShuffleEntryReader.java:84)
at org.apache.beam.runners.dataflow.worker.util.common.ForwardingReiterator.hasNext(ForwardingReiterator.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.advance(GroupingShuffleEntryIterator.java:271)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.hasNext(GroupingShuffleEntryIterator.java:263)
at org.apache.beam.runners.dataflow.worker.GroupingShuffleReader$GroupingShuffleReaderIterator$ValuesIterator.hasNext(GroupingShuffleReader.java:397)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:55)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:39)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.invokeProcessElement(GroupAlsoByWindowFnRunner.java:115)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.processElement(GroupAlsoByWindowFnRunner.java:73)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowsParDoFn.processElement(GroupAlsoByWindowsParDoFn.java:114)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:411)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:380)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:305)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:140)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:120)
at org.apache.beam.runners.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:107)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ExecutionError: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2048)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader.read(CachingShuffleBatchReader.java:74)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:125)
... 26 more
Caused by: java.lang.OutOfMemoryError: Java heap space
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.getFixedLengthPrefixedByteArray(ChunkingShuffleBatchReader.java:98)
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.getShuffleEntry(ChunkingShuffleBatchReader.java:82)
at org.apache.beam.runners.dataflow.worker.ChunkingShuffleBatchReader.read(ChunkingShuffleBatchReader.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader$1.load(CachingShuffleBatchReader.java:55)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader$1.load(CachingShuffleBatchReader.java:51)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3528)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2277)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2154)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2044)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.get(LocalCache.java:3952)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3974)
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4958)
at org.apache.beam.runners.dataflow.worker.util.common.worker.CachingShuffleBatchReader.read(CachingShuffleBatchReader.java:74)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntries(BatchingShuffleEntryReader.java:125)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.fillEntriesIfNeeded(BatchingShuffleEntryReader.java:119)
at org.apache.beam.runners.dataflow.worker.util.common.worker.BatchingShuffleEntryReader$ShuffleReadIterator.hasNext(BatchingShuffleEntryReader.java:84)
at org.apache.beam.runners.dataflow.worker.util.common.ForwardingReiterator.hasNext(ForwardingReiterator.java:63)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.advance(GroupingShuffleEntryIterator.java:271)
at org.apache.beam.runners.dataflow.worker.util.common.worker.GroupingShuffleEntryIterator$ValuesIterator.hasNext(GroupingShuffleEntryIterator.java:263)
at org.apache.beam.runners.dataflow.worker.GroupingShuffleReader$GroupingShuffleReaderIterator$ValuesIterator.hasNext(GroupingShuffleReader.java:397)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:55)
at org.apache.beam.runners.dataflow.worker.util.BatchGroupAlsoByWindowReshuffleFn.processElement(BatchGroupAlsoByWindowReshuffleFn.java:39)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.invokeProcessElement(GroupAlsoByWindowFnRunner.java:115)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowFnRunner.processElement(GroupAlsoByWindowFnRunner.java:73)
at org.apache.beam.runners.dataflow.worker.GroupAlsoByWindowsParDoFn.processElement(GroupAlsoByWindowsParDoFn.java:114)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:44)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:411)
at org.apache.beam.runners.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:380)
Dec 06, 2019 8:06:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:06:42.661Z: Finished operation Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/GroupByKey/Read+Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/GroupByKey/GroupByWindow+Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/ExpandIterable+Write to BQ/StreamingInserts/StreamingWriteTables/GlobalWindow/Window.Assign+Write to BQ/StreamingInserts/StreamingWriteTables/StreamingWrite
Dec 06, 2019 8:06:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
SEVERE: 2019-12-06T08:06:42.785Z: Workflow failed. Causes: S04:Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/GroupByKey/Read+Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/GroupByKey/GroupByWindow+Write to BQ/StreamingInserts/StreamingWriteTables/Reshuffle/ExpandIterable+Write to BQ/StreamingInserts/StreamingWriteTables/GlobalWindow/Window.Assign+Write to BQ/StreamingInserts/StreamingWriteTables/StreamingWrite failed., The job failed because a work item has failed 4 times. Look in previous log entries for the cause of each one of the 4 failures. For more information, see https://cloud.google.com/dataflow/docs/guides/common-errors. The work item was attempted on these workers:
testpipeline-jenkins-1206-12052323-pj7f-harness-46wn
Root cause: Work item failed.,
testpipeline-jenkins-1206-12052323-pj7f-harness-46wn
Root cause: Work item failed.,
testpipeline-jenkins-1206-12052323-pj7f-harness-81vp
Root cause: The worker lost contact with the service.,
testpipeline-jenkins-1206-12052323-pj7f-harness-46wn
Root cause: Work item failed.
Dec 06, 2019 8:06:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:06:42.983Z: Cleaning up.
Dec 06, 2019 8:06:44 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:06:43.079Z: Stopping worker pool...
Dec 06, 2019 8:10:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:10:30.270Z: Autoscaling: Resized worker pool from 5 to 0.
Dec 06, 2019 8:10:30 AM org.apache.beam.runners.dataflow.util.MonitoringUtil$LoggingHandler process
INFO: 2019-12-06T08:10:30.316Z: Worker pool stopped.
Dec 06, 2019 8:10:37 AM org.apache.beam.runners.dataflow.DataflowPipelineJob logTerminalState
INFO: Job 2019-12-05_23_23_32-8134832997422585979 failed with status FAILED.
org.apache.beam.sdk.bigqueryioperftests.BigQueryIOIT > testWriteThenRead STANDARD_OUT
Load test results for test (ID): 82f06094-fb21-47f0-a4e4-447dae0a4a21 and timestamp: 2019-12-06T07:23:26.116000000Z:
Metric:       Value:
write_time    146.694
org.apache.beam.sdk.bigqueryioperftests.BigQueryIOIT > testWriteThenRead FAILED
java.lang.IllegalArgumentException: Writing avro formatted data is only supported for FILE_LOADS, however the method was STREAMING_INSERTS
at org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument(Preconditions.java:216)
at org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO$Write.expand(BigQueryIO.java:2394)
at org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO$Write.expand(BigQueryIO.java:1665)
at org.apache.beam.sdk.Pipeline.applyInternal(Pipeline.java:539)
at org.apache.beam.sdk.Pipeline.applyTransform(Pipeline.java:490)
at org.apache.beam.sdk.values.PCollection.apply(PCollection.java:368)
at org.apache.beam.sdk.bigqueryioperftests.BigQueryIOIT.testWrite(BigQueryIOIT.java:159)
at org.apache.beam.sdk.bigqueryioperftests.BigQueryIOIT.testAvroWrite(BigQueryIOIT.java:148)
at org.apache.beam.sdk.bigqueryioperftests.BigQueryIOIT.testWriteThenRead(BigQueryIOIT.java:121)
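The IllegalArgumentException above comes from a precondition in BigQueryIO.Write.expand: Avro-formatted writes are only accepted with the FILE_LOADS method, while this run configured STREAMING_INSERTS. A hedged sketch of a compatible configuration follows; the table spec, schema, and conversion helper are placeholders and are not taken from the BigQueryIOIT perf test itself.

    // Hedged sketch only: pairing an Avro format function with FILE_LOADS, the combination
    // the precondition in BigQueryIO.Write.expand requires. Placeholder names (tableSpec,
    // schema, toGenericRecord) are assumptions, not code from BigQueryIOIT.
    import com.google.api.services.bigquery.model.TableRow;
    import com.google.api.services.bigquery.model.TableSchema;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.beam.sdk.io.gcp.bigquery.AvroWriteRequest;
    import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;

    class AvroWriteMethodSketch {
      static BigQueryIO.Write<TableRow> avroWrite(String tableSpec, TableSchema schema) {
        return BigQueryIO.<TableRow>write()
            .to(tableSpec)
            .withSchema(schema)
            .withAvroFormatFunction(AvroWriteMethodSketch::toGenericRecord)
            // FILE_LOADS is mandatory once an Avro format function is set; leaving the
            // method as STREAMING_INSERTS produces the IllegalArgumentException above.
            .withMethod(BigQueryIO.Write.Method.FILE_LOADS)
            .withCreateDisposition(BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED);
      }

      private static GenericRecord toGenericRecord(AvroWriteRequest<TableRow> request) {
        // Placeholder: a real pipeline would map its element onto request.getSchema() here.
        throw new UnsupportedOperationException("illustration only");
      }
    }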
Gradle Test Executor 1 finished executing tests.
> Task :sdks:java:io:bigquery-io-perf-tests:integrationTest FAILED
1 test completed, 1 failed
Finished generating test XML results (0.052 secs) into: <https://builds.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/ws/src/sdks/java/io/bigquery-io-perf-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.06 secs) into: <https://builds.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/ws/src/sdks/java/io/bigquery-io-perf-tests/build/reports/tests/integrationTest>
:sdks:java:io:bigquery-io-perf-tests:integrationTest (Thread[Execution worker for ':' Thread 11,5,main]) completed. Took 47 mins 14.261 secs.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':sdks:java:io:bigquery-io-perf-tests:integrationTest'.
> There were failing tests. See the report at:
> file://<https://builds.apache.org/job/beam_BiqQueryIO_Streaming_Performance_Test_Java/ws/src/sdks/java/io/bigquery-io-perf-tests/build/reports/tests/integrationTest/index.html>
* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 48m 0s
80 actionable tasks: 54 executed, 26 from cache
Publishing build scan...
https://scans.gradle.com/s/hft2mo5spknbc
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure