See
<https://builds.apache.org/job/beam_PostCommit_Java_Nexmark_Flink/5/display/redirect?page=changes>
Changes:
[ehudm] Jupyter notebook for monitoring precommit job times
------------------------------------------
[...truncated 3.83 MB...]
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.webmonitor.WebMonitorEndpoint
lambda$shutDownInternal$4
INFO: Removing cache directory /tmp/flink-web-ui
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.rpc.akka.AkkaRpcService
stopService
INFO: Stopping Akka RPC service.
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.blob.AbstractBlobCache close
INFO: Shutting down BLOB cache
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.blob.AbstractBlobCache close
INFO: Shutting down BLOB cache
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.blob.BlobServer close
INFO: Stopped BLOB server at 0.0.0.0:43917
Jun 20, 2018 5:44:10 PM org.apache.flink.runtime.rpc.akka.AkkaRpcService
lambda$stopService$4
INFO: Stopped Akka RPC service.
Jun 20, 2018 5:44:10 PM org.apache.beam.runners.flink.FlinkRunner run
SEVERE: Pipeline execution failed
org.apache.flink.runtime.client.JobExecutionException:
org.apache.beam.sdk.util.UserCodeException: java.io.IOException: Unable to
insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.flink.runtime.minicluster.MiniCluster.executeJobBlocking(MiniCluster.java:625)
at
org.apache.flink.client.LocalExecutor.executePlan(LocalExecutor.java:234)
at
org.apache.flink.api.java.LocalEnvironment.execute(LocalEnvironment.java:91)
at
org.apache.beam.runners.flink.FlinkPipelineExecutionEnvironment.executePipeline(FlinkPipelineExecutionEnvironment.java:114)
at org.apache.beam.runners.flink.FlinkRunner.run(FlinkRunner.java:116)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:297)
at org.apache.beam.sdk.nexmark.Main.savePerfsToBigQuery(Main.java:182)
at org.apache.beam.sdk.nexmark.Main.runAll(Main.java:102)
at org.apache.beam.sdk.nexmark.Main.main(Main.java:395)
Caused by: org.apache.beam.sdk.util.UserCodeException: java.io.IOException:
Unable to insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.beam.sdk.util.UserCodeException.wrap(UserCodeException.java:36)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn$DoFnInvoker.invokeProcessElement(Unknown
Source)
at
org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
at
org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
at
org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:66)
at
org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction.mapPartition(FlinkDoFnFunction.java:120)
at
org.apache.flink.runtime.operators.MapPartitionDriver.run(MapPartitionDriver.java:103)
at org.apache.flink.runtime.operators.BatchTask.run(BatchTask.java:503)
at
org.apache.flink.runtime.operators.BatchTask.invoke(BatchTask.java:368)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:703)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Unable to insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:231)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:202)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startLoadJob(BigQueryServicesImpl.java:142)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.load(WriteTables.java:269)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.access$600(WriteTables.java:80)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn.processElement(WriteTables.java:159)
Caused by: com.google.api.client.googleapis.json.GoogleJsonResponseException:
400 Bad Request
{
"code" : 400,
"errors" : [ {
"domain" : "global",
"message" : "Invalid field name \"Runtime(sec)\". Fields must contain only
letters, numbers, and underscores, start with a letter or underscore, and be at
most 128 characters long.",
"reason" : "invalid"
} ],
"message" : "Invalid field name \"Runtime(sec)\". Fields must contain only
letters, numbers, and underscores, start with a letter or underscore, and be at
most 128 characters long."
}
at
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:146)
at
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:321)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1065)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:419)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:352)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:469)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:216)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:202)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startLoadJob(BigQueryServicesImpl.java:142)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.load(WriteTables.java:269)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.access$600(WriteTables.java:80)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn.processElement(WriteTables.java:159)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn$DoFnInvoker.invokeProcessElement(Unknown
Source)
at
org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
at
org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
at
org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:66)
at
org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction.mapPartition(FlinkDoFnFunction.java:120)
at
org.apache.flink.runtime.operators.MapPartitionDriver.run(MapPartitionDriver.java:103)
at org.apache.flink.runtime.operators.BatchTask.run(BatchTask.java:503)
at
org.apache.flink.runtime.operators.BatchTask.invoke(BatchTask.java:368)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:703)
at java.lang.Thread.run(Thread.java:748)
==========================================================================================
Run started 2018-06-20T17:41:38.301Z and ran for PT151.775S
Default configuration:
{"debug":true,"query":0,"sourceType":"DIRECT","sinkType":"DEVNULL","exportSummaryToBigQuery":false,"pubSubMode":"COMBINED","numEvents":100000,"numEventGenerators":100,"rateShape":"SINE","firstEventRate":10000,"nextEventRate":10000,"rateUnit":"PER_SECOND","ratePeriodSec":600,"preloadSeconds":0,"streamTimeout":240,"isRateLimited":false,"useWallclockEventTime":false,"avgPersonByteSize":200,"avgAuctionByteSize":500,"avgBidByteSize":100,"hotAuctionRatio":2,"hotSellersRatio":4,"hotBiddersRatio":4,"windowSizeSec":10,"windowPeriodSec":5,"watermarkHoldbackSec":0,"numInFlightAuctions":100,"numActivePeople":1000,"coderStrategy":"HAND","cpuDelayMs":0,"diskBusyBytes":0,"auctionSkip":123,"fanout":5,"maxAuctionsWaitingTime":600,"occasionalDelaySec":3,"probDelayedEvent":0.1,"maxLogEvents":100000,"usePubsubPublishTime":false,"outOfOrderGroupSize":1}
Configurations:
Conf Description
0000 query:0; exportSummaryToBigQuery:true; streamTimeout:60
0001 query:1; exportSummaryToBigQuery:true; streamTimeout:60
0002 query:2; exportSummaryToBigQuery:true; streamTimeout:60
0003 query:3; exportSummaryToBigQuery:true; streamTimeout:60
0004 query:4; exportSummaryToBigQuery:true; numEvents:10000; streamTimeout:60
0005 query:5; exportSummaryToBigQuery:true; streamTimeout:60
0006 query:6; exportSummaryToBigQuery:true; numEvents:10000; streamTimeout:60
0007 query:7; exportSummaryToBigQuery:true; streamTimeout:60
0008 query:8; exportSummaryToBigQuery:true; streamTimeout:60
0009 query:9; exportSummaryToBigQuery:true; numEvents:10000; streamTimeout:60
Exception in thread "main"
  0010  query:10; exportSummaryToBigQuery:true; streamTimeout:60
0011 query:11; exportSummaryToBigQuery:true; streamTimeout:60
0012 query:12; exportSummaryToBigQuery:true; streamTimeout:60
Performance:
Conf Runtime(sec) (Baseline) Events(/sec) (Baseline) Results
(Baseline)
0000 0.8 122100.1 100000
0001 0.6 165016.5 92000
0002 0.5 218340.6 351
0003 11.3 8837.0 580
0004 6.5 1528.1 40
0005 6.4 15666.6 12
0006 6.6 1504.9 103
0007 7.0 14251.1 1
0008 8.3 11984.7 6000
0009 5.9 1691.8 298
0010 6.7 15024.0 1
0011 8.1 12336.5 1919
0012 6.2 16084.9 1919
==========================================================================================
java.lang.RuntimeException: Pipeline execution failed
at org.apache.beam.runners.flink.FlinkRunner.run(FlinkRunner.java:119)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:311)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:297)
at org.apache.beam.sdk.nexmark.Main.savePerfsToBigQuery(Main.java:182)
at org.apache.beam.sdk.nexmark.Main.runAll(Main.java:102)
at org.apache.beam.sdk.nexmark.Main.main(Main.java:395)
Caused by: org.apache.flink.runtime.client.JobExecutionException:
org.apache.beam.sdk.util.UserCodeException: java.io.IOException: Unable to
insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.flink.runtime.minicluster.MiniCluster.executeJobBlocking(MiniCluster.java:625)
at
org.apache.flink.client.LocalExecutor.executePlan(LocalExecutor.java:234)
at
org.apache.flink.api.java.LocalEnvironment.execute(LocalEnvironment.java:91)
at
org.apache.beam.runners.flink.FlinkPipelineExecutionEnvironment.executePipeline(FlinkPipelineExecutionEnvironment.java:114)
at org.apache.beam.runners.flink.FlinkRunner.run(FlinkRunner.java:116)
... 5 more
Caused by: org.apache.beam.sdk.util.UserCodeException: java.io.IOException:
Unable to insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.beam.sdk.util.UserCodeException.wrap(UserCodeException.java:36)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn$DoFnInvoker.invokeProcessElement(Unknown
Source)
at
org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
at
org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
at
org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:66)
at
org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction.mapPartition(FlinkDoFnFunction.java:120)
at
org.apache.flink.runtime.operators.MapPartitionDriver.run(MapPartitionDriver.java:103)
at org.apache.flink.runtime.operators.BatchTask.run(BatchTask.java:503)
at
org.apache.flink.runtime.operators.BatchTask.invoke(BatchTask.java:368)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:703)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.IOException: Unable to insert job:
beam_load_mainjenkins0620174139e4c5cb3c_c2ee7d3a0dd5423ea360c315b3f90f7e_6831ef916a10e30d9ef1c0e5f0e1ec7d_00001_00000-0,
aborting after 9 .
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:231)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:202)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startLoadJob(BigQueryServicesImpl.java:142)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.load(WriteTables.java:269)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.access$600(WriteTables.java:80)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn.processElement(WriteTables.java:159)
Caused by: com.google.api.client.googleapis.json.GoogleJsonResponseException:
400 Bad Request
{
"code" : 400,
"errors" : [ {
"domain" : "global",
"message" : "Invalid field name \"Runtime(sec)\". Fields must contain only
letters, numbers, and underscores, start with a letter or underscore, and be at
most 128 characters long.",
"reason" : "invalid"
} ],
"message" : "Invalid field name \"Runtime(sec)\". Fields must contain only
letters, numbers, and underscores, start with a letter or underscore, and be at
most 128 characters long."
}
at
com.google.api.client.googleapis.json.GoogleJsonResponseException.from(GoogleJsonResponseException.java:146)
at
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:113)
at
com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest.newExceptionOnError(AbstractGoogleJsonClientRequest.java:40)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest$1.interceptResponse(AbstractGoogleClientRequest.java:321)
at com.google.api.client.http.HttpRequest.execute(HttpRequest.java:1065)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:419)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.executeUnparsed(AbstractGoogleClientRequest.java:352)
at
com.google.api.client.googleapis.services.AbstractGoogleClientRequest.execute(AbstractGoogleClientRequest.java:469)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:216)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startJob(BigQueryServicesImpl.java:202)
at
org.apache.beam.sdk.io.gcp.bigquery.BigQueryServicesImpl$JobServiceImpl.startLoadJob(BigQueryServicesImpl.java:142)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.load(WriteTables.java:269)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables.access$600(WriteTables.java:80)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn.processElement(WriteTables.java:159)
at
org.apache.beam.sdk.io.gcp.bigquery.WriteTables$WriteTablesDoFn$DoFnInvoker.invokeProcessElement(Unknown
Source)
at
org.apache.beam.runners.core.SimpleDoFnRunner.invokeProcessElement(SimpleDoFnRunner.java:185)
at
org.apache.beam.runners.core.SimpleDoFnRunner.processElement(SimpleDoFnRunner.java:146)
at
org.apache.beam.runners.flink.metrics.DoFnRunnerWithMetricsUpdate.processElement(DoFnRunnerWithMetricsUpdate.java:66)
at
org.apache.beam.runners.flink.translation.functions.FlinkDoFnFunction.mapPartition(FlinkDoFnFunction.java:120)
at
org.apache.flink.runtime.operators.MapPartitionDriver.run(MapPartitionDriver.java:103)
at org.apache.flink.runtime.operators.BatchTask.run(BatchTask.java:503)
at
org.apache.flink.runtime.operators.BatchTask.invoke(BatchTask.java:368)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:703)
at java.lang.Thread.run(Thread.java:748)
> Task :beam-sdks-java-nexmark:run FAILED
:beam-sdks-java-nexmark:run (Thread[Task worker for ':' Thread 4,5,main])
completed. Took 2 mins 35.471 secs.
FAILURE: Build failed with an exception.
* What went wrong:
Execution failed for task ':beam-sdks-java-nexmark:run'.
> Process 'command '/usr/local/asfpackages/java/jdk1.8.0_152/bin/java'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --debug option to
get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 5.0.
See
https://docs.gradle.org/4.8/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 5m 32s
62 actionable tasks: 58 executed, 4 from cache
Publishing build scan...
https://gradle.com/s/xcfxck3ndgcia
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user
[email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]
Not sending mail to unregistered user [email protected]