See <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/120/display/redirect>

------------------------------------------
[...truncated 703.46 KB...]
        at org.apache.beam.sdk.io.common.FileBasedIOITHelper$DeterministicallyConstructTestTextLineFn$DoFnInvoker.invokeProcessElement(Unknown Source)
        at org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:177)
        at org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:141)
        at com.google.cloud.dataflow.worker.SimpleParDoFn.processElement(SimpleParDoFn.java:323)
        at com.google.cloud.dataflow.worker.util.common.worker.ParDoOperation.process(ParDoOperation.java:43)
        at com.google.cloud.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:48)
        at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:200)
        at com.google.cloud.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:158)
        at com.google.cloud.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:75)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:383)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:355)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:286)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
    
org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create file/.temp-beam-2018-05-02_12-04-13-0/9640924c-fa98-4d9d-8f6b-3b1e98160db3. Name node is in safe mode.
    The reported blocks 0 needs additional 31 blocks to reach the threshold 0.9990 of total blocks 31.
    The number of live datanodes 0 has reached the minimum number 0. Safe mode will be turned off automatically once the thresholds have been reached.
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1327)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2447)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2335)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:623)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

        at org.apache.hadoop.ipc.Client.call(Client.java:1475)
        at org.apache.hadoop.ipc.Client.call(Client.java:1412)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
        at com.sun.proxy.$Proxy65.create(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
        at com.sun.proxy.$Proxy66.create(Unknown Source)
        at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1648)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1689)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1624)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
        at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
        at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
        at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
        at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:249)
        at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:236)
        at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
        at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
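
The SafeModeException above says the NameNode was still in safe mode when the sink tried to open its temp files: zero datanodes had reported in, so none of the 31 expected blocks were accounted for and every create() was rejected. One way a harness could avoid racing the cluster startup is to poll safe mode before running the pipeline. A minimal sketch using the Hadoop 2.x client API (the NameNode URI is a placeholder, not taken from this build):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;

    /** Minimal sketch: block until the NameNode leaves safe mode. */
    public class WaitForSafeModeExit {
      public static void main(String[] args) throws Exception {
        // Placeholder URI; substitute the test cluster's NameNode address.
        DistributedFileSystem dfs = (DistributedFileSystem)
            FileSystem.get(URI.create("hdfs://namenode:8020"), new Configuration());
        // SAFEMODE_GET only queries the current state; it does not toggle it.
        while (dfs.setSafeMode(SafeModeAction.SAFEMODE_GET)) {
          System.out.println("NameNode is in safe mode; waiting for block reports...");
          Thread.sleep(5000L);
        }
        System.out.println("Safe mode is off; HDFS writes can proceed.");
      }
    }

The equivalent check from the shell is "hdfs dfsadmin -safemode wait", which blocks until safe mode ends.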
    
org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create file/.temp-beam-2018-05-02_12-04-13-0/fa20752e-15fd-4d79-abb4-8c99520c36e0. Name node is in safe mode.
    The reported blocks 0 needs additional 31 blocks to reach the threshold 0.9990 of total blocks 31.
    The number of live datanodes 0 has reached the minimum number 0. Safe mode will be turned off automatically once the thresholds have been reached.
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1327)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFileInt(FSNamesystem.java:2447)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startFile(FSNamesystem.java:2335)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.create(NameNodeRpcServer.java:623)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.create(ClientNamenodeProtocolServerSideTranslatorPB.java:397)
        at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

        at org.apache.hadoop.ipc.Client.call(Client.java:1475)
        at org.apache.hadoop.ipc.Client.call(Client.java:1412)
        at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
        at com.sun.proxy.$Proxy65.create(Unknown Source)
        at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:296)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
        at com.sun.proxy.$Proxy66.create(Unknown Source)
        at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1648)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1689)
        at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1624)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:448)
        at org.apache.hadoop.hdfs.DistributedFileSystem$7.doCall(DistributedFileSystem.java:444)
        at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:459)
        at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:387)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:911)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:892)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:789)
        at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:778)
        at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:109)
        at org.apache.beam.sdk.io.hdfs.HadoopFileSystem.create(HadoopFileSystem.java:68)
        at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:249)
        at org.apache.beam.sdk.io.FileSystems.create(FileSystems.java:236)
        at org.apache.beam.sdk.io.FileBasedSink$Writer.open(FileBasedSink.java:923)
        at org.apache.beam.sdk.io.WriteFiles$WriteUnshardedTempFilesWithSpillingFn.processElement(WriteFiles.java:503)
    Workflow failed. Causes: S02:Generate sequence/Read(BoundedCountingSource)+Produce text lines+Write content to files/WriteFiles/RewindowIntoGlobal/Window.Assign+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/WriteUnshardedBundles+Write content to files/WriteFiles/GatherTempFileResults/View.AsList/ParDo(ToIsmRecordForGlobalWindow)+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Reify+Write content to files/WriteFiles/WriteUnshardedBundlesToTempFiles/GroupUnwritten/Write failed., A work item was attempted 4 times without success. Each time the worker eventually lost contact with the service. The work item was attempted on:
      textioit0writethenreadall-05020504-2bm7-harness-dg8g,
      textioit0writethenreadall-05020504-2bm7-harness-dg8g,
      textioit0writethenreadall-05020504-2bm7-harness-dg8g,
      textioit0writethenreadall-05020504-2bm7-harness-f4vh
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:346)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:328)
        at org.apache.beam.sdk.io.text.TextIOIT.writeThenReadAll(TextIOIT.java:114)

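For context, the failing frame, TextIOIT.writeThenReadAll, exercises Beam's usual write-then-read integration-test pattern: write generated lines to the target filesystem, then read them back and verify. A hypothetical, stripped-down sketch of that pattern (not the actual TextIOIT source; the hdfs:// prefix is a placeholder, and registering the HDFS filesystem with the pipeline, e.g. via HadoopFileSystemOptions/--hdfsConfiguration, is assumed):

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.TextIO;
    import org.apache.beam.sdk.options.PipelineOptions;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    /** Hypothetical write-then-read sketch; not the actual TextIOIT code. */
    public class WriteThenReadSketch {
      public static void main(String[] args) {
        PipelineOptions options = PipelineOptionsFactory.fromArgs(args).create();

        // Write phase: each element becomes one line in files under the prefix.
        // This is the step that failed above: FileBasedSink opens its temp files
        // on HDFS, and the NameNode rejected the creates while in safe mode.
        Pipeline write = Pipeline.create(options);
        write
            .apply(Create.of("line-1", "line-2", "line-3"))
            .apply(TextIO.write().to("hdfs://namenode:8020/TEXTIO_IT_"));
        write.run().waitUntilFinish();

        // Read phase: read the files back; the real test verifies their contents.
        Pipeline read = Pipeline.create(options);
        read.apply(TextIO.read().from("hdfs://namenode:8020/TEXTIO_IT_*"));
        read.run().waitUntilFinish();
      }
    }
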
1 test completed, 1 failed
Finished generating test XML results (0.06 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/beam/sdks/java/io/file-based-io-tests/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.05 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/beam/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest>
:beam-sdks-java-io-file-based-io-tests:integrationTest (Thread[Task worker for ':' Thread 5,5,main]) completed. Took 8 mins 7.719 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 9m 54s
63 actionable tasks: 40 executed, 23 up-to-date

Publishing build scan...
https://gradle.com/s/42uaxt337iang


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-file-based-io-tests:integrationTest'.
> There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/beam/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-file-based-io-tests:integrationTest'.
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
        at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
        at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
        at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
        at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
        at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
        at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
        at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
        at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
        at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
        at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
        at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
        at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
        at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
        at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
        at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
        at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
        at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: file://<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/beam/sdks/java/io/file-based-io-tests/build/reports/tests/integrationTest/index.html>
        at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
        at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
        at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
        at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
        at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
        at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
        at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
        ... 31 more


* Get more help at https://help.gradle.org

2018-05-02 12:12:19,813 24915fd8 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 12:12:19,814 24915fd8 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-05-02 12:12:19,815 24915fd8 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-filebasedioithdfs-1525255264140> delete -f <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster.yml> --ignore-not-found
2018-05-02 12:12:20,498 24915fd8 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/config-filebasedioithdfs-1525255264140> delete -f <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/src/.test-infra/kubernetes/hadoop/SmallITCluster/hdfs-single-datanode-cluster-for-local-dev.yml> --ignore-not-found
2018-05-02 12:12:20,750 24915fd8 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 780, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 646, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 526, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-05-02 12:12:20,751 24915fd8 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-05-02 12:12:20,776 24915fd8 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-05-02 12:12:20,777 24915fd8 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/pkb.log>
2018-05-02 12:12:20,777 24915fd8 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_Compressed_TextIOIT_HDFS/ws/runs/24915fd8/completion_statuses.json>
Build step 'Execute shell' marked build as failure
