See <https://builds.apache.org/job/beam_PostCommit_Go/5529/display/redirect>
Changes:
------------------------------------------
[...truncated 339.10 KB...]
"@type": "OutputReference",
"step_name": "e13",
"output_name": "i0"
},
"serialized_fn":
"%0A%27%22%25%0A%02c1%12%1F%0A%1D%0A%1Bbeam:coder:global_window:v1j9%0A%25%0A%23%0A%21beam:windowfn:global_windows:v0.1%10%01%1A%02c1%22%02:%00%28%010%018%02H%01"
}
},
{
"kind": "ParallelDo",
"name": "e15",
"properties": {
"user_name": "passert.Sum(b)/passert.sumFn'1",
"output_info": [
{
"user_name": "bogus",
"output_name": "bogus",
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
}
}
],
"parallel_input": {
"@type": "OutputReference",
"step_name": "e14",
"output_name": "i0"
},
"serialized_fn": "e15"
}
},
{
"kind": "ParallelDo",
"name": "e10",
"properties": {
"user_name": "passert.Sum(a)/beam.addFixedKeyFn",
"output_info": [
{
"user_name": "i0",
"output_name": "i0",
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:pair",
"component_encodings": [
{
"@type": "kind:length_prefix",
"component_encodings": [
{
"@type":
"Cgd2YXJpbnR6EgIIAhqFAQpxZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9jb3JlL3J1bnRpbWUvY29kZXJ4LmVuY1ZhckludFoSEAgWIgQIGUAPKgYIFBICCAgikQEKcWdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vY29yZS9ydW50aW1lL2NvZGVyeC5kZWNWYXJJbnRaEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
}
]
},
{
"@type": "kind:length_prefix",
"component_encodings": [
{
"@type":
"Cgd2YXJpbnR6EgIIAhqFAQpxZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9jb3JlL3J1bnRpbWUvY29kZXJ4LmVuY1ZhckludFoSEAgWIgQIGUAPKgYIFBICCAgikQEKcWdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vY29yZS9ydW50aW1lL2NvZGVyeC5kZWNWYXJJbnRaEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
}
]
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
}
}
],
"parallel_input": {
"@type": "OutputReference",
"step_name": "e9",
"output_name": "i0"
},
"serialized_fn": "e10"
}
},
{
"kind": "GroupByKey",
"name": "e11",
"properties": {
"user_name": "passert.Sum(a)/CoGBK",
"output_info": [
{
"user_name": "i0",
"output_name": "i0",
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:pair",
"component_encodings": [
{
"@type": "kind:length_prefix",
"component_encodings": [
{
"@type":
"Cgd2YXJpbnR6EgIIAhqFAQpxZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9jb3JlL3J1bnRpbWUvY29kZXJ4LmVuY1ZhckludFoSEAgWIgQIGUAPKgYIFBICCAgikQEKcWdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vY29yZS9ydW50aW1lL2NvZGVyeC5kZWNWYXJJbnRaEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
}
]
},
{
"@type": "kind:stream",
"component_encodings": [
{
"@type": "kind:length_prefix",
"component_encodings": [
{
"@type":
"Cgd2YXJpbnR6EgIIAhqFAQpxZ2l0aHViLmNvbS9hcGFjaGUvYmVhbS9zZGtzL2dvL3Rlc3QvdmVuZG9yL2dpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby9wa2cvYmVhbS9jb3JlL3J1bnRpbWUvY29kZXJ4LmVuY1ZhckludFoSEAgWIgQIGUAPKgYIFBICCAgikQEKcWdpdGh1Yi5jb20vYXBhY2hlL2JlYW0vc2Rrcy9nby90ZXN0L3ZlbmRvci9naXRodWIuY29tL2FwYWNoZS9iZWFtL3Nka3MvZ28vcGtnL2JlYW0vY29yZS9ydW50aW1lL2NvZGVyeC5kZWNWYXJJbnRaEhwIFiIECBlAAyIGCBQSAggIKgQIGUAPKgQIGUAB"
}
]
}
],
"is_stream_like": true
}
],
"is_pair_like": true
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
}
}
],
"parallel_input": {
"@type": "OutputReference",
"step_name": "e10",
"output_name": "i0"
},
"serialized_fn":
"%0A%27%22%25%0A%02c1%12%1F%0A%1D%0A%1Bbeam:coder:global_window:v1j9%0A%25%0A%23%0A%21beam:windowfn:global_windows:v0.1%10%01%1A%02c1%22%02:%00%28%010%018%02H%01"
}
},
{
"kind": "ParallelDo",
"name": "e12",
"properties": {
"user_name": "passert.Sum(a)/passert.sumFn",
"output_info": [
{
"user_name": "bogus",
"output_name": "bogus",
"encoding": {
"@type": "kind:windowed_value",
"component_encodings": [
{
"@type": "kind:bytes"
},
{
"@type": "kind:global_window"
}
],
"is_wrapper": true
}
}
],
"parallel_input": {
"@type": "OutputReference",
"step_name": "e11",
"output_name": "i0"
},
"serialized_fn": "e12"
}
}
],
"type": "JOB_TYPE_BATCH"
}
2019/12/07 12:04:19 Test pardo:multioutput failed: googleapi: Error 400:
(cca4841534b6e020): The workflow could not be created. Causes:
(708a7e1d00740db9): Dataflow quota error for jobs-per-project quota. Project
apache-beam-testing is running 301 jobs. Please check the quota usage via GCP
Console. If it exceeds the limit, please wait for a workflow to finish or
contact Google Cloud Support to request an increase in quota. If it does not,
contact Google Cloud Support., failedPrecondition
2019/12/07 12:04:19 Test wordcount:memfs failed: googleapi: Error 400:
(15b614ddd2834ec6): The workflow could not be created. Causes:
(ef6d399b9f965fa7): Dataflow quota error for jobs-per-project quota. Project
apache-beam-testing is running 301 jobs. Please check the quota usage via GCP
Console. If it exceeds the limit, please wait for a workflow to finish or
contact Google Cloud Support to request an increase in quota. If it does not,
contact Google Cloud Support., failedPrecondition
2019/12/07 12:04:19 Test pardo:kvsideinput failed: googleapi: Error 400:
(1557de9d82d5b283): The workflow could not be created. Causes:
(5f16cfa3b2898346): Dataflow quota error for jobs-per-project quota. Project
apache-beam-testing is running 301 jobs. Please check the quota usage via GCP
Console. If it exceeds the limit, please wait for a workflow to finish or
contact Google Cloud Support to request an increase in quota. If it does not,
contact Google Cloud Support., failedPrecondition
2019/12/07 12:04:19 Test wordcount:kinglear failed: googleapi: Error 400:
(5375201071c5afb2): The workflow could not be created. Causes:
(b7f7713e1f79a05f): Dataflow quota error for jobs-per-project quota. Project
apache-beam-testing is running 301 jobs. Please check the quota usage via GCP
Console. If it exceeds the limit, please wait for a workflow to finish or
contact Google Cloud Support to request an increase in quota. If it does not,
contact Google Cloud Support., failedPrecondition
2019/12/07 12:04:20 Test cogbk:cogbk failed: googleapi: Error 400:
(27707eaafc8e539f): The workflow could not be created. Causes:
(25b77bf9e0c88915): Dataflow quota error for jobs-per-project quota. Project
apache-beam-testing is running 301 jobs. Please check the quota usage via GCP
Console. If it exceeds the limit, please wait for a workflow to finish or
contact Google Cloud Support to request an increase in quota. If it does not,
contact Google Cloud Support., failedPrecondition
2019/12/07 12:04:20 Result: 7 tests failed
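Note: all 7 failures above are the same Dataflow jobs-per-project quota rejection ("running 301 jobs"), not test regressions. A minimal sketch of how the active-job count could be checked before re-running the suite; the project name comes from the errors above, while the use of wc is an illustrative assumption (the count includes gcloud's header row):

# Hypothetical check, not part of the build script below:
# count Dataflow jobs still in the ACTIVE state for the test project.
gcloud dataflow jobs list --project=apache-beam-testing --status=active | wc -l
# If the count is at the jobs-per-project limit, wait for jobs to finish
# (or request a quota increase) before re-running these integration tests.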
if [[ ! -z "$JOB_PORT" ]]; then
# Shut down the job server
kill %1 || echo "Failed to shut down job server"
fi
# Delete the container locally and remotely
docker rmi $CONTAINER:$TAG || echo "Failed to remove container"
Untagged: us.gcr.io/apache-beam-testing/jenkins/go_sdk:20191207-120316
Untagged: us.gcr.io/apache-beam-testing/jenkins/go_sdk@sha256:f3962d8fe02060bfea8c3d845d2be4262d7756565e61a3e73975a05143223a4f
Deleted: sha256:fbe5d2a43ecff435e01f1308971ccb24d70f858efda4b0f032aa67c5fcba69d1
Deleted: sha256:2911e3cd50947fc0d0e55a98684b995b4774e0aed932d2d1aeeb7a92ea79ed04
Deleted: sha256:05b9e4487f78f865e9ec5ef7f072f575e0754d963c738d7434f18909f61ecde1
Deleted: sha256:48076aadadf1fd8cbb128971baff66c17da62472f14f76d7da456c11f6386f22
Deleted: sha256:1b9a0c2e4c8ddae0a4942d23aa3c1b1e3cda6c8fd95fc27b0f61f00132abd4c9
Deleted: sha256:d16b7d6a3f0c99df41b4302d8ca6c37ef8f78bd59261c920530c2812ff3b6b37
gcloud --quiet container images delete $CONTAINER:$TAG || echo "Failed to delete container"
Digests:
- us.gcr.io/apache-beam-testing/jenkins/go_sdk@sha256:f3962d8fe02060bfea8c3d845d2be4262d7756565e61a3e73975a05143223a4f
Associated tags:
- 20191207-120316
Tags:
- us.gcr.io/apache-beam-testing/jenkins/go_sdk:20191207-120316
Deleted [us.gcr.io/apache-beam-testing/jenkins/go_sdk:20191207-120316].
Deleted [us.gcr.io/apache-beam-testing/jenkins/go_sdk@sha256:f3962d8fe02060bfea8c3d845d2be4262d7756565e61a3e73975a05143223a4f].
# Clean up tempdir
rm -rf $TMPDIR
>>> FAILURE
if [[ "$TEST_EXIT_CODE" -eq 0 ]]; then
echo ">>> SUCCESS"
else
echo ">>> FAILURE"
fi
exit $TEST_EXIT_CODE
> Task :goIntegrationTests FAILED
FAILURE: Build failed with an exception.
* Where:
Build file '<https://builds.apache.org/job/beam_PostCommit_Go/ws/src/build.gradle>' line: 195
* What went wrong:
Execution failed for task ':goIntegrationTests'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug
option to get more log output. Run with --scan to get full insights.
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 6.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/5.2.1/userguide/command_line_interface.html#sec:command_line_warnings
BUILD FAILED in 4m 5s
72 actionable tasks: 55 executed, 17 from cache
Publishing build scan...
https://scans.gradle.com/s/5j4ehtrbjyz5a
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]