See <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/267/display/redirect?page=changes>

Changes:

[ajamato] Add new metrics protos based on s.apache.org/beam-fn-api-metrics

------------------------------------------
[...truncated 365.43 KB...]
        at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:391)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:360)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:288)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
    com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting for a server that matches ReadPreferenceServerSelector{readPreference=primary}. Client view of cluster state is {type=UNKNOWN, servers=[{address=35.226.50.1:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
        at com.mongodb.connection.BaseCluster.createTimeoutException(BaseCluster.java:369)
        at com.mongodb.connection.BaseCluster.selectServer(BaseCluster.java:101)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:75)
        at com.mongodb.binding.ClusterBinding$ClusterBindingConnectionSource.<init>(ClusterBinding.java:71)
        at com.mongodb.binding.ClusterBinding.getReadConnectionSource(ClusterBinding.java:63)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:89)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
        at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
        at com.mongodb.Mongo.execute(Mongo.java:772)
        at com.mongodb.Mongo$2.execute(Mongo.java:759)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
        at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
        at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:391)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:360)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:288)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
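
The 30000 ms in this timeout is the driver's serverSelectionTimeout (the same value appears in the "Cluster created with settings" INFO line further down). A minimal sketch, assuming the 3.x Java driver API seen in the trace, of how that timeout is configured on the client and could be raised while diagnosing the flaky connection; the address is taken from the log, everything else is illustrative:

    import com.mongodb.MongoClient;
    import com.mongodb.MongoClientOptions;
    import com.mongodb.ServerAddress;
    import org.bson.Document;

    public class TimeoutProbe {
      public static void main(String[] args) {
        // serverSelectionTimeout bounds how long selectServer() waits before
        // throwing the MongoTimeoutException above; connectTimeout bounds the
        // socket connect that produced the SocketTimeoutException cause.
        MongoClientOptions options = MongoClientOptions.builder()
            .serverSelectionTimeout(60_000) // driver default is 30000 ms, as in the log
            .connectTimeout(20_000)
            .build();
        MongoClient client = new MongoClient(new ServerAddress("35.226.50.1", 27017), options);
        try {
          // Trigger server selection with a cheap command.
          client.getDatabase("admin").runCommand(new Document("ping", 1));
          System.out.println("connected");
        } finally {
          client.close();
        }
      }
    }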
    com.mongodb.MongoCommandException: Command failed with error 26: 'ns not found' on server 35.226.50.1:27017. The full response is { "ok" : 0.0, "errmsg" : "ns not found", "code" : 26, "codeName" : "NamespaceNotFound" }
        at com.mongodb.connection.ProtocolHelper.getCommandFailureException(ProtocolHelper.java:115)
        at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:114)
        at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
        at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
        at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:215)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:186)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:178)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:91)
        at com.mongodb.operation.CommandOperationHelper.executeWrappedCommandProtocol(CommandOperationHelper.java:84)
        at com.mongodb.operation.CommandReadOperation.execute(CommandReadOperation.java:55)
        at com.mongodb.Mongo.execute(Mongo.java:772)
        at com.mongodb.Mongo$2.execute(Mongo.java:759)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:130)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:124)
        at com.mongodb.MongoDatabaseImpl.runCommand(MongoDatabaseImpl.java:114)
        at org.apache.beam.sdk.io.mongodb.MongoDbIO$BoundedMongoDbSource.split(MongoDbIO.java:332)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.splitAndValidate(WorkerCustomSources.java:275)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitTyped(WorkerCustomSources.java:197)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplitWithApiLimit(WorkerCustomSources.java:181)
        at com.google.cloud.dataflow.worker.WorkerCustomSources.performSplit(WorkerCustomSources.java:160)
        at com.google.cloud.dataflow.worker.WorkerCustomSourceOperationExecutor.execute(WorkerCustomSourceOperationExecutor.java:77)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.executeWork(BatchDataflowWorker.java:391)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.doWork(BatchDataflowWorker.java:360)
        at com.google.cloud.dataflow.worker.BatchDataflowWorker.getAndPerformWork(BatchDataflowWorker.java:288)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:134)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:114)
        at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:101)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
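
Error 26 ("ns not found") arises because MongoDbIO$BoundedMongoDbSource.split (MongoDbIO.java:332 in the frames above) computes split points with the server's splitVector command via runCommand, and the target namespace does not exist, plausibly because the write phase of the test never succeeded after the connection timeouts. A minimal sketch of that kind of call; "beam" and "test_collection" are assumed names, not taken from the log:

    import com.mongodb.MongoClient;
    import com.mongodb.MongoCommandException;
    import org.bson.BsonDocument;
    import org.bson.BsonInt32;
    import org.bson.BsonString;
    import org.bson.Document;

    public class SplitVectorProbe {
      public static void main(String[] args) {
        MongoClient client = new MongoClient("35.226.50.1", 27017);
        try {
          // splitVector takes the full namespace ("<db>.<collection>"), a key
          // pattern, and a target chunk size in MB.
          BsonDocument splitVector =
              new BsonDocument("splitVector", new BsonString("beam.test_collection"))
                  .append("keyPattern", new BsonDocument("_id", new BsonInt32(1)))
                  .append("maxChunkSize", new BsonInt32(64));
          Document response = client.getDatabase("beam").runCommand(splitVector);
          System.out.println(response.toJson());
        } catch (MongoCommandException e) {
          // Code 26 / NamespaceNotFound, as in the trace above, means the
          // collection was never created on the server.
          System.err.println("splitVector failed: " + e.getErrorMessage());
        } finally {
          client.close();
        }
      }
    }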
    Workflow failed. Causes: S03:Read all documents/Read(BoundedMongoDbSource)+Map documents to Strings/Map+Calculate hashcode/WithKeys/AddKeys/Map+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey+Calculate hashcode/Combine.perKey(Hashing)/Combine.GroupedValues/Partial+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Reify+Calculate hashcode/Combine.perKey(Hashing)/GroupByKey/Write failed.
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:134)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:90)
        at org.apache.beam.runners.dataflow.TestDataflowRunner.run(TestDataflowRunner.java:55)
        at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:348)
        at org.apache.beam.sdk.testing.TestPipeline.run(TestPipeline.java:329)
        at org.apache.beam.sdk.io.mongodb.MongoDBIOIT.testWriteAndRead(MongoDBIOIT.java:145)
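
The step names in the "Workflow failed" cause line describe the read-and-hash half of testWriteAndRead. A hedged reconstruction of that shape (URI, database, and collection are assumed placeholders; the real test feeds the strings into a per-key hashing CombineFn, the "Calculate hashcode" steps above, and asserts on the result):

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.io.mongodb.MongoDbIO;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.MapElements;
    import org.apache.beam.sdk.values.PCollection;
    import org.apache.beam.sdk.values.TypeDescriptors;
    import org.bson.Document;

    public class ReadAndHashSketch {
      public static void main(String[] args) {
        Pipeline p = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());
        PCollection<String> docs =
            p.apply("Read all documents",
                    MongoDbIO.read()
                        .withUri("mongodb://35.226.50.1:27017") // address from the log
                        .withDatabase("beam")                   // assumed
                        .withCollection("test_collection"))     // assumed
             .apply("Map documents to Strings",
                    MapElements.into(TypeDescriptors.strings()).via(Document::toJson));
        // The split failure above happens while sizing "Read all documents",
        // i.e. before any of the downstream stages ever run.
        p.run().waitUntilFinish();
      }
    }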

org.apache.beam.sdk.io.mongodb.MongoDBIOIT STANDARD_ERROR
    Jun 05, 2018 6:20:40 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Cluster created with settings {hosts=[35.226.50.1:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=500}
    Jun 05, 2018 6:20:40 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: No server chosen by WritableServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=SINGLE, all=[ServerDescription{address=35.226.50.1:27017, type=UNKNOWN, state=CONNECTING}]}. Waiting for 30000 ms before timing out
    Jun 05, 2018 6:20:40 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Opened connection [connectionId{localValue:3, serverValue:4}] to 35.226.50.1:27017
    Jun 05, 2018 6:20:40 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Monitor thread successfully connected to server with description ServerDescription{address=35.226.50.1:27017, type=STANDALONE, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 6, 5]}, minWireVersion=0, maxWireVersion=6, maxDocumentSize=16777216, roundTripTimeNanos=1697517}
    Jun 05, 2018 6:20:40 PM com.mongodb.diagnostics.logging.SLF4JLogger info
    INFO: Opened connection [connectionId{localValue:4, serverValue:5}] to 35.226.50.1:27017

Gradle Test Executor 1 finished executing tests.

> Task :beam-sdks-java-io-mongodb:integrationTest FAILED

1 test completed, 1 failed
Finished generating test XML results (0.027 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/sdks/java/io/mongodb/build/test-results/integrationTest>
Generating HTML test report...
Finished generating test html results (0.038 secs) into: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/sdks/java/io/mongodb/build/reports/tests/integrationTest>
:beam-sdks-java-io-mongodb:integrationTest (Thread[Task worker for ':' Thread 8,5,main]) completed. Took 12 mins 57.37 secs.

Deprecated Gradle features were used in this build, making it incompatible with Gradle 5.0.
See https://docs.gradle.org/4.7/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 13m 8s
62 actionable tasks: 1 executed, 61 up-to-date

Publishing build scan...
https://gradle.com/s/5xsowbv4p56tm


STDERR: 
FAILURE: Build failed with an exception.

* What went wrong:
Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
> There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>

* Try:
Run with --debug option to get more log output. Run with --scan to get full insights.

* Exception is:
org.gradle.api.tasks.TaskExecutionException: Execution failed for task ':beam-sdks-java-io-mongodb:integrationTest'.
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:103)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.execute(ExecuteActionsTaskExecuter.java:73)
        at org.gradle.api.internal.tasks.execution.OutputDirectoryCreatingTaskExecuter.execute(OutputDirectoryCreatingTaskExecuter.java:51)
        at org.gradle.api.internal.tasks.execution.SkipCachedTaskExecuter.execute(SkipCachedTaskExecuter.java:105)
        at org.gradle.api.internal.tasks.execution.SkipUpToDateTaskExecuter.execute(SkipUpToDateTaskExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.ResolveTaskOutputCachingStateExecuter.execute(ResolveTaskOutputCachingStateExecuter.java:54)
        at org.gradle.api.internal.tasks.execution.ResolveBuildCacheKeyExecuter.execute(ResolveBuildCacheKeyExecuter.java:66)
        at org.gradle.api.internal.tasks.execution.ValidatingTaskExecuter.execute(ValidatingTaskExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.SkipEmptySourceFilesTaskExecuter.execute(SkipEmptySourceFilesTaskExecuter.java:101)
        at org.gradle.api.internal.tasks.execution.FinalizeInputFilePropertiesTaskExecuter.execute(FinalizeInputFilePropertiesTaskExecuter.java:44)
        at org.gradle.api.internal.tasks.execution.CleanupStaleOutputsExecuter.execute(CleanupStaleOutputsExecuter.java:91)
        at org.gradle.api.internal.tasks.execution.ResolveTaskArtifactStateTaskExecuter.execute(ResolveTaskArtifactStateTaskExecuter.java:62)
        at org.gradle.api.internal.tasks.execution.SkipTaskWithNoActionsExecuter.execute(SkipTaskWithNoActionsExecuter.java:59)
        at org.gradle.api.internal.tasks.execution.SkipOnlyIfTaskExecuter.execute(SkipOnlyIfTaskExecuter.java:54)
        at org.gradle.api.internal.tasks.execution.ExecuteAtMostOnceTaskExecuter.execute(ExecuteAtMostOnceTaskExecuter.java:43)
        at org.gradle.api.internal.tasks.execution.CatchExceptionTaskExecuter.execute(CatchExceptionTaskExecuter.java:34)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker$1.run(DefaultTaskGraphExecuter.java:256)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
        at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:249)
        at org.gradle.execution.taskgraph.DefaultTaskGraphExecuter$EventFiringTaskWorker.execute(DefaultTaskGraphExecuter.java:238)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:104)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker$1.execute(DefaultTaskPlanExecutor.java:98)
        at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.execute(DefaultTaskExecutionPlan.java:663)
        at org.gradle.execution.taskgraph.DefaultTaskExecutionPlan.executeWithTask(DefaultTaskExecutionPlan.java:596)
        at org.gradle.execution.taskgraph.DefaultTaskPlanExecutor$TaskExecutorWorker.run(DefaultTaskPlanExecutor.java:98)
        at org.gradle.internal.concurrent.ExecutorPolicy$CatchAndRecordFailures.onExecute(ExecutorPolicy.java:63)
        at org.gradle.internal.concurrent.ManagedExecutorImpl$1.run(ManagedExecutorImpl.java:46)
        at org.gradle.internal.concurrent.ThreadFactoryImpl$ManagedThreadRunnable.run(ThreadFactoryImpl.java:55)
Caused by: org.gradle.api.GradleException: There were failing tests. See the report at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/sdks/java/io/mongodb/build/reports/tests/integrationTest/index.html>
        at org.gradle.api.tasks.testing.AbstractTestTask.handleTestFailures(AbstractTestTask.java:612)
        at org.gradle.api.tasks.testing.AbstractTestTask.executeTests(AbstractTestTask.java:484)
        at org.gradle.api.tasks.testing.Test.executeTests(Test.java:583)
        at org.gradle.internal.reflect.JavaMethod.invoke(JavaMethod.java:73)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.doExecute(StandardTaskAction.java:46)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:39)
        at org.gradle.api.internal.project.taskfactory.StandardTaskAction.execute(StandardTaskAction.java:26)
        at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:794)
        at org.gradle.api.internal.AbstractTask$TaskActionWrapper.execute(AbstractTask.java:761)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter$1.run(ExecuteActionsTaskExecuter.java:124)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:317)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor$RunnableBuildOperationWorker.execute(DefaultBuildOperationExecutor.java:309)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.execute(DefaultBuildOperationExecutor.java:185)
        at org.gradle.internal.operations.DefaultBuildOperationExecutor.run(DefaultBuildOperationExecutor.java:97)
        at org.gradle.internal.operations.DelegatingBuildOperationExecutor.run(DelegatingBuildOperationExecutor.java:31)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeAction(ExecuteActionsTaskExecuter.java:113)
        at org.gradle.api.internal.tasks.execution.ExecuteActionsTaskExecuter.executeActions(ExecuteActionsTaskExecuter.java:95)
        ... 31 more


* Get more help at https://help.gradle.org

2018-06-05 18:20:41,737 f91d60f8 MainThread beam_integration_benchmark(1/1) ERROR    Error during benchmark beam_integration_benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-06-05 18:20:41,739 f91d60f8 MainThread beam_integration_benchmark(1/1) INFO     Cleaning up benchmark beam_integration_benchmark
2018-06-05 18:20:41,739 f91d60f8 MainThread beam_integration_benchmark(1/1) INFO     Running: kubectl --kubeconfig=<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/config-mongodbioit-267> delete -f <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/src/.test-infra/kubernetes/mongodb/load-balancer/mongo.yml> --ignore-not-found
2018-06-05 18:20:42,598 f91d60f8 MainThread beam_integration_benchmark(1/1) ERROR    Exception running benchmark
Traceback (most recent call last):
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 801, in RunBenchmarkTask
    RunBenchmark(spec, collector)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 667, in RunBenchmark
    DoRunPhase(spec, collector, detailed_timer)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/pkb.py>", line 547, in DoRunPhase
    samples = spec.BenchmarkRun(spec)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/linux_benchmarks/beam_integration_benchmark.py>", line 159, in Run
    job_type=job_type)
  File "<https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/PerfKitBenchmarker/perfkitbenchmarker/providers/gcp/gcp_dpb_dataflow.py>", line 90, in SubmitJob
    assert retcode == 0, "Integration Test Failed."
AssertionError: Integration Test Failed.
2018-06-05 18:20:42,599 f91d60f8 MainThread beam_integration_benchmark(1/1) ERROR    Benchmark 1/1 beam_integration_benchmark (UID: beam_integration_benchmark0) failed. Execution will continue.
2018-06-05 18:20:42,613 f91d60f8 MainThread INFO     Benchmark run statuses:
---------------------------------------------------------------------------------
Name                        UID                          Status  Failed Substatus
---------------------------------------------------------------------------------
beam_integration_benchmark  beam_integration_benchmark0  FAILED
---------------------------------------------------------------------------------
Success rate: 0.00% (0/1)
2018-06-05 18:20:42,614 f91d60f8 MainThread INFO     Complete logs can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f91d60f8/pkb.log>
2018-06-05 18:20:42,614 f91d60f8 MainThread INFO     Completion statuses can be found at: <https://builds.apache.org/job/beam_PerformanceTests_MongoDBIO_IT/ws/runs/f91d60f8/completion_statuses.json>
Build step 'Execute shell' marked build as failure
