See <https://ci-beam.apache.org/job/beam_LoadTests_Python_Combine_Flink_Streaming/1/display/redirect>

Changes:


------------------------------------------
[...truncated 58.30 KB...]
> Task :sdks:java:extensions:join-library:createCheckerFrameworkManifest
> Task :runners:flink:1.10:job-server:createCheckerFrameworkManifest
> Task :model:job-management:extractProto
> Task :model:fn-execution:extractProto
> Task :sdks:java:extensions:protobuf:extractProto
> Task :runners:flink:1.10:copySourceOverrides
> Task :runners:flink:1.10:job-server:processResources NO-SOURCE
> Task :sdks:java:io:google-cloud-platform:processResources NO-SOURCE
> Task :sdks:java:extensions:join-library:processResources NO-SOURCE
> Task :runners:flink:1.10:copyTestResourcesOverrides NO-SOURCE
> Task :sdks:java:extensions:protobuf:processResources NO-SOURCE
> Task :sdks:java:io:hadoop-common:processResources NO-SOURCE
> Task :sdks:java:io:kafka:processResources NO-SOURCE
> Task :model:job-management:processResources
> Task :model:fn-execution:processResources
> Task :sdks:java:expansion-service:processResources NO-SOURCE
> Task :sdks:java:io:mongodb:processResources NO-SOURCE
> Task :sdks:java:io:parquet:processResources NO-SOURCE
> Task :sdks:java:core:generateGrammarSource FROM-CACHE
> Task :sdks:java:extensions:sql:zetasql:processResources NO-SOURCE
> Task :runners:flink:1.10:processResources
> Task :sdks:java:extensions:sql:expansion-service:createCheckerFrameworkManifest
> Task :runners:flink:1.10:job-server-container:copyLicenses
> Task :sdks:java:extensions:sql:expansion-service:processResources NO-SOURCE
> Task :runners:flink:1.10:job-server-container:dockerClean UP-TO-DATE
> Task :sdks:java:core:processResources
> Task :sdks:java:build-tools:compileJava FROM-CACHE
> Task :sdks:java:build-tools:processResources
> Task :sdks:java:build-tools:classes
> Task :sdks:java:build-tools:jar
> Task :model:pipeline:extractIncludeProto
> Task :model:pipeline:extractProto
> Task :sdks:java:extensions:sql:copyFmppTemplatesFromCalciteCore
> Task :sdks:java:extensions:sql:copyFmppTemplatesFromSrc
> Task :model:pipeline:generateProto
> Task :model:pipeline:compileJava FROM-CACHE
> Task :model:pipeline:processResources
> Task :model:pipeline:classes
> Task :sdks:java:extensions:sql:generateFmppSources
> Task :model:pipeline:jar
> Task :model:fn-execution:extractIncludeProto
> Task :model:job-management:extractIncludeProto
> Task :model:job-management:generateProto
> Task :model:fn-execution:generateProto
> Task :model:job-management:compileJava FROM-CACHE
> Task :model:job-management:classes
> Task :model:fn-execution:compileJava FROM-CACHE
> Task :model:fn-execution:classes

> Task :sdks:java:extensions:sql:compileJavacc
Java Compiler Compiler Version 4.0 (Parser Generator)
(type "javacc" with no arguments for help)
Reading from file <https://ci-beam.apache.org/job/beam_LoadTests_Python_Combine_Flink_Streaming/ws/src/sdks/java/extensions/sql/build/generated/fmpp/javacc/Parser.jj> . . .
Note: UNICODE_INPUT option is specified. Please make sure you create the parser/lexer using a Reader with the correct character encoding.
Warning: Lookahead adequacy checking not being performed since option LOOKAHEAD is more than 1.  Set option FORCE_LA_CHECK to true to force checking.
File "TokenMgrError.java" does not exist.  Will create one.
File "ParseException.java" does not exist.  Will create one.
File "Token.java" does not exist.  Will create one.
File "SimpleCharStream.java" does not exist.  Will create one.
Parser generated with 0 errors and 1 warnings.

> Task :sdks:java:extensions:sql:processResources
> Task :model:pipeline:shadowJar
> Task :model:job-management:shadowJar
> Task :model:fn-execution:shadowJar
> Task :sdks:java:core:compileJava FROM-CACHE
> Task :sdks:java:core:classes
> Task :sdks:java:core:shadowJar
> Task :sdks:java:extensions:protobuf:extractIncludeProto
> Task :sdks:java:extensions:protobuf:generateProto NO-SOURCE
> Task :sdks:java:extensions:google-cloud-platform-core:compileJava FROM-CACHE
> Task :sdks:java:extensions:google-cloud-platform-core:classes UP-TO-DATE
> Task :runners:local-java:compileJava FROM-CACHE
> Task :runners:local-java:classes UP-TO-DATE
> Task :runners:local-java:jar
> Task :vendor:sdks-java-extensions-protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:join-library:compileJava FROM-CACHE
> Task :vendor:sdks-java-extensions-protobuf:classes UP-TO-DATE
> Task :sdks:java:extensions:join-library:classes UP-TO-DATE
> Task :sdks:java:extensions:google-cloud-platform-core:jar
> Task :sdks:java:extensions:join-library:jar
> Task :runners:core-construction-java:compileJava FROM-CACHE
> Task :runners:core-construction-java:classes UP-TO-DATE
> Task :sdks:java:io:mongodb:compileJava FROM-CACHE
> Task :sdks:java:io:mongodb:classes UP-TO-DATE
> Task :sdks:java:io:mongodb:jar
> Task :vendor:sdks-java-extensions-protobuf:shadowJar
> Task :sdks:java:fn-execution:compileJava FROM-CACHE
> Task :sdks:java:fn-execution:classes UP-TO-DATE
> Task :runners:core-construction-java:jar
> Task :sdks:java:fn-execution:jar
> Task :sdks:java:io:hadoop-common:compileJava FROM-CACHE
> Task :sdks:java:io:hadoop-common:classes UP-TO-DATE
> Task :sdks:java:io:hadoop-common:jar
> Task :sdks:java:core:jar
> Task :sdks:java:io:parquet:compileJava FROM-CACHE
> Task :sdks:java:io:parquet:classes UP-TO-DATE
> Task :sdks:java:io:parquet:jar
> Task :sdks:java:extensions:protobuf:compileJava FROM-CACHE
> Task :sdks:java:extensions:protobuf:classes UP-TO-DATE
> Task :runners:core-java:compileJava FROM-CACHE
> Task :runners:core-java:classes UP-TO-DATE
> Task :sdks:java:extensions:protobuf:jar
> Task :runners:core-java:jar
> Task :sdks:java:harness:compileJava FROM-CACHE
> Task :sdks:java:harness:classes UP-TO-DATE
> Task :sdks:java:harness:jar
> Task :sdks:java:harness:shadowJar
> Task :runners:java-fn-execution:compileJava FROM-CACHE
> Task :runners:java-fn-execution:classes UP-TO-DATE
> Task :runners:java-fn-execution:jar
> Task :sdks:java:expansion-service:compileJava FROM-CACHE
> Task :sdks:java:expansion-service:classes UP-TO-DATE
> Task :sdks:java:expansion-service:jar
> Task :runners:direct-java:compileJava FROM-CACHE
> Task :runners:direct-java:classes UP-TO-DATE
> Task :runners:java-job-service:compileJava FROM-CACHE
> Task :runners:java-job-service:classes UP-TO-DATE
> Task :runners:java-job-service:jar
> Task :sdks:java:io:kafka:compileJava FROM-CACHE
> Task :sdks:java:io:kafka:classes UP-TO-DATE
> Task :sdks:java:io:kafka:jar
> Task :runners:flink:1.10:compileJava FROM-CACHE
> Task :runners:flink:1.10:classes
> Task :sdks:java:io:google-cloud-platform:compileJava FROM-CACHE
> Task :sdks:java:io:google-cloud-platform:classes UP-TO-DATE
> Task :runners:flink:1.10:jar
> Task :runners:flink:1.10:job-server:compileJava NO-SOURCE
> Task :runners:flink:1.10:job-server:classes UP-TO-DATE
> Task :sdks:java:io:google-cloud-platform:jar
> Task :runners:direct-java:shadowJar
> Task :sdks:java:extensions:sql:compileJava FROM-CACHE
> Task :sdks:java:extensions:sql:classes
> Task :sdks:java:extensions:sql:jar
> Task :sdks:java:extensions:sql:zetasql:compileJava FROM-CACHE
> Task :sdks:java:extensions:sql:zetasql:classes UP-TO-DATE
> Task :sdks:java:extensions:sql:zetasql:jar
> Task :sdks:java:extensions:sql:expansion-service:compileJava FROM-CACHE
> Task :sdks:java:extensions:sql:expansion-service:classes UP-TO-DATE
> Task :sdks:java:extensions:sql:expansion-service:jar
> Task :sdks:java:extensions:sql:expansion-service:shadowJar
> Task :runners:flink:1.10:job-server:shadowJar
> Task :runners:flink:1.10:job-server-container:copyDockerfileDependencies
> Task :runners:flink:1.10:job-server-container:dockerPrepare
> Task :runners:flink:1.10:job-server-container:docker
> Task :runners:flink:1.10:job-server-container:dockerTag

> Task :runners:flink:1.10:job-server-container:dockerPush
The push refers to repository [gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server]
e2423772cf57: Preparing
4ed663d5cfbc: Preparing
9c35d03a069c: Preparing
64c94576481c: Preparing
5db2f59b1f32: Preparing
fd48b7313a1f: Preparing
16e8bdbf703d: Preparing
de9aadc6b492: Preparing
e5df62d9b33a: Preparing
7a9460d53218: Preparing
b2765ac0333a: Preparing
0ced13fcf944: Preparing
fd48b7313a1f: Waiting
16e8bdbf703d: Waiting
e5df62d9b33a: Waiting
de9aadc6b492: Waiting
7a9460d53218: Waiting
0ced13fcf944: Waiting
b2765ac0333a: Waiting
4ed663d5cfbc: Pushed
9c35d03a069c: Pushed
fd48b7313a1f: Layer already exists
16e8bdbf703d: Layer already exists
e5df62d9b33a: Layer already exists
de9aadc6b492: Layer already exists
e2423772cf57: Pushed
7a9460d53218: Layer already exists
b2765ac0333a: Layer already exists
0ced13fcf944: Layer already exists
5db2f59b1f32: Pushed
64c94576481c: Pushed
0827154638-combine-streaming: digest: sha256:064bba6afc50acacf94e520aca1e92c15968a97fba0f71d09df67378b79ae823 size: 2841

Deprecated Gradle features were used in this build, making it incompatible with Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings

BUILD SUCCESSFUL in 4m 1s
94 actionable tasks: 65 executed, 28 from cache, 1 up-to-date

Publishing build scan...
https://gradle.com/s/t7c3irqpmihhk

[EnvInject] - Injecting environment variables from a build step.
[EnvInject] - Injecting as environment variables the properties content 
JOB_SERVER_IMAGE=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:0827154638-combine-streaming
CLUSTER_NAME=beam-loadtests-python-combine-flink-streaming-1
DETACHED_MODE=true
HARNESS_IMAGES_TO_PULL=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:0827154638-combine-streaming
FLINK_NUM_WORKERS=16
FLINK_DOWNLOAD_URL=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz
GCS_BUCKET=gs://beam-flink-cluster
HADOOP_DOWNLOAD_URL=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
FLINK_TASKMANAGER_SLOTS=1
ARTIFACTS_DIR=gs://beam-flink-cluster/beam-loadtests-python-combine-flink-streaming-1
GCLOUD_ZONE=us-central1-a

[EnvInject] - Variables injected successfully.
[beam_LoadTests_Python_Combine_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins7051977529961116936.sh
+ echo Setting up flink cluster
Setting up flink cluster
[beam_LoadTests_Python_Combine_Flink_Streaming] $ /bin/bash -xe /tmp/jenkins1935810122574628250.sh
+ cd <https://ci-beam.apache.org/job/beam_LoadTests_Python_Combine_Flink_Streaming/ws/src/.test-infra/dataproc>
+ ./flink_cluster.sh create
+ GCLOUD_ZONE=us-central1-a
+ DATAPROC_VERSION=1.2
+ MASTER_NAME=beam-loadtests-python-combine-flink-streaming-1-m
+ INIT_ACTIONS_FOLDER_NAME=init-actions
+ FLINK_INIT=gs://beam-flink-cluster/init-actions/flink.sh
+ BEAM_INIT=gs://beam-flink-cluster/init-actions/beam.sh
+ DOCKER_INIT=gs://beam-flink-cluster/init-actions/docker.sh
+ FLINK_LOCAL_PORT=8081
+ FLINK_TASKMANAGER_SLOTS=1
+ YARN_APPLICATION_MASTER=
+ create
+ upload_init_actions
+ echo 'Uploading initialization actions to GCS bucket: gs://beam-flink-cluster'
Uploading initialization actions to GCS bucket: gs://beam-flink-cluster
+ gsutil cp -r init-actions/beam.sh init-actions/docker.sh init-actions/flink.sh gs://beam-flink-cluster/init-actions
Copying file://init-actions/beam.sh [Content-Type=text/x-sh]...
/ [0 files][    0.0 B/  2.3 KiB]                                                
/ [1 files][  2.3 KiB/  2.3 KiB]                                                
Copying file://init-actions/docker.sh [Content-Type=text/x-sh]...
/ [1 files][  2.3 KiB/  6.0 KiB]                                                
/ [2 files][  6.0 KiB/  6.0 KiB]                                                
Copying file://init-actions/flink.sh [Content-Type=text/x-sh]...
/ [2 files][  6.0 KiB/ 13.7 KiB]                                                
/ [3 files][ 13.7 KiB/ 13.7 KiB]                                                
Operation completed over 3 objects/13.7 KiB.                                    
 
+ create_cluster
+ local metadata=flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,
+ metadata+=flink-start-yarn-session=true,
+ metadata+=flink-taskmanager-slots=1,
+ metadata+=hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:0827154638-combine-streaming ]]
+ metadata+=,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:0827154638-combine-streaming
+ [[ -n gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:0827154638-combine-streaming ]]
+ metadata+=,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:0827154638-combine-streaming
+ local image_version=1.2
+ echo 'Starting dataproc cluster. Dataproc version: 1.2'
Starting dataproc cluster. Dataproc version: 1.2
+ local num_dataproc_****s=17
+ gcloud dataproc clusters create beam-loadtests-python-combine-flink-streaming-1 --region=global --num-****s=17 --initialization-actions gs://beam-flink-cluster/init-actions/docker.sh,gs://beam-flink-cluster/init-actions/beam.sh,gs://beam-flink-cluster/init-actions/flink.sh --metadata flink-snapshot-url=https://archive.apache.org/dist/flink/flink-1.10.1/flink-1.10.1-bin-scala_2.11.tgz,flink-start-yarn-session=true,flink-taskmanager-slots=1,hadoop-jar-url=https://repo.maven.apache.org/maven2/org/apache/flink/flink-shaded-hadoop-2-uber/2.8.3-9.0/flink-shaded-hadoop-2-uber-2.8.3-9.0.jar,beam-sdk-harness-images-to-pull=gcr.io/apache-beam-testing/beam_portability/beam_python3.7_sdk:0827154638-combine-streaming,beam-job-server-image=gcr.io/apache-beam-testing/beam_portability/beam_flink1.10_job_server:0827154638-combine-streaming, --image-version=1.2 --zone=us-central1-a --quiet
ERROR: (gcloud.dataproc.clusters.create) ALREADY_EXISTS: Already exists: Failed to create cluster: Cluster projects/apache-beam-testing/regions/global/clusters/beam-loadtests-python-combine-flink-streaming-1
Build step 'Execute shell' marked build as failure
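
Note: the create step above failed because a Dataproc cluster with this exact name already exists, most likely left over from an earlier run whose teardown did not complete. A possible manual cleanup before re-triggering the job (assuming the stale cluster can simply be removed) is to delete it with gcloud:

    gcloud dataproc clusters delete beam-loadtests-python-combine-flink-streaming-1 \
        --region=global --quiet

or, from .test-infra/dataproc with CLUSTER_NAME and GCLOUD_ZONE exported as in the step above, via the same helper script's teardown action (assuming a "delete" action exists alongside "create" in flink_cluster.sh):

    ./flink_cluster.sh delete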
