See
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/1652/display/redirect?page=changes>
Changes:
[noreply] [BEAM-13982] Added output of logging for python E2E pytests (#17637)
[noreply] [BEAM-14473] Throw error if using globally windowed, unbounded side
[noreply] [BEAM-14440] Add basic fuzz tests to the coders package (#17587)
[noreply] [BEAM-14035 ] Implement BigQuerySchema Read/Write TransformProvider
[noreply] Add Akvelon to case-studies (#17611)
[noreply] Merge pull request #17520 from BEAM-12356 Close DatasetService leaked
[noreply] Adding eslint and lint configuration to TypeScript SDK (#17676)
------------------------------------------
[...truncated 605.71 KB...]
value: <
spec: <
urn: "beam:coders:javasdk:0.1"
payload:
"\202SNAPPY\000\000\000\000\001\000\000\000\001\000\000\000\210\326\001\350\254\355\000\005sr\000$org.apache.beam.sdk.coders.VoidCoder\271\277U\233\350\r\257U\002\000\000xr\000&j3\000\024Atomic\0055
\307\354\265\314\205tPF\002\0055\000*j5\000$Structured\0059\034s\277\022\016\325\3246\021\t9\000
j9\000\005/0C\335\325\211\256\274~\370\002\000\000xp"
>
>
>
coders: <
key: "c0"
value: <
spec: <
urn: "beam:coder:bytes:v1"
>
>
>
coders: <
key: "c1"
value: <
spec: <
urn: "beam:coder:global_window:v1"
>
>
>
coders: <
key: "c2"
value: <
spec: <
urn: "beam:coder:row:v1"
payload:
"\n\r\n\007role_id\032\002\020\003\022$abc5e222-ab8f-4f6f-800a-a58bb0f45675"
>
>
>
environments: <
key: "EzkhoYmfMkbeam:env:docker:v1"
value: <
urn: "beam:env:docker:v1"
payload: "\n apache/beam_java8_sdk:2.40.0.dev"
capabilities: "beam:coder:bytes:v1"
capabilities: "beam:coder:bool:v1"
capabilities: "beam:coder:varint:v1"
capabilities: "beam:coder:string_utf8:v1"
capabilities: "beam:coder:iterable:v1"
capabilities: "beam:coder:timer:v1"
capabilities: "beam:coder:kv:v1"
capabilities: "beam:coder:length_prefix:v1"
capabilities: "beam:coder:global_window:v1"
capabilities: "beam:coder:interval_window:v1"
capabilities: "beam:coder:custom_window:v1"
capabilities: "beam:coder:windowed_value:v1"
capabilities: "beam:coder:double:v1"
capabilities: "beam:coder:row:v1"
capabilities: "beam:coder:param_windowed_value:v1"
capabilities: "beam:coder:state_backed_iterable:v1"
capabilities: "beam:coder:sharded_key:v1"
capabilities: "beam:coder:nullable:v1"
capabilities: "beam:protocol:multi_core_bundle_processing:v1"
capabilities: "beam:protocol:progress_reporting:v1"
capabilities: "beam:protocol:harness_monitoring_infos:v1"
capabilities: "beam:protocol:control_request_elements_embedding:v1"
capabilities: "beam:protocol:state_caching:v1"
capabilities: "beam:version:sdk_base:apache/beam_java8_sdk:2.40.0.dev"
capabilities: "beam:transform:sdf_truncate_sized_restrictions:v1"
capabilities: "beam:transform:to_string:v1"
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nL/tmp/artifacts/icedtea-sound-ebvtNFkfFXg4aaYFuDnwKpwDSjzsaZqlqv5iKxPTr-U.jar\022@79bbed34591f15783869a605b839f02a9c034a3cec699aa5aafe622b13d3afe5"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\n=icedtea-sound-ebvtNFkfFXg4aaYFuDnwKpwDSjzsaZqlqv5iKxPTr-U.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nF/tmp/artifacts/jaccess-ULFTCPsb6cLYZ0f1BG1FQfczmHNaZCx8plXuRDKpBqE.jar\022@50b15308fb1be9c2d86747f5046d4541f73398735a642c7ca655ee4432a906a1"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\n7jaccess-ULFTCPsb6cLYZ0f1BG1FQfczmHNaZCx8plXuRDKpBqE.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nI/tmp/artifacts/localedata-dUHqyGxaTVCjfTI8MckPYarZ3_mwf62udkxaHi1aKns.jar\022@7541eac86c5a4d50a37d323c31c90f61aad9dff9b07fadae764c5a1e2d5a2a7b"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\n:localedata-dUHqyGxaTVCjfTI8MckPYarZ3_mwf62udkxaHi1aKns.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nF/tmp/artifacts/nashorn-XdUndQGroXOP9NCsfITpBERYcbbGXVHLjbvNWXCh-3A.jar\022@5dd5277501aba1738ff4d0ac7c84e904445871b6c65d51cb8dbbcd5970a1fb70"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\n7nashorn-XdUndQGroXOP9NCsfITpBERYcbbGXVHLjbvNWXCh-3A.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nG/tmp/artifacts/cldrdata-YqzuKX1QnLCOo0cwjKRdBhGrip_ltIJZg-APT60tUPA.jar\022@62acee297d509cb08ea347308ca45d0611ab8a9fe5b4825983e00f4fad2d50f0"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\n8cldrdata-YqzuKX1QnLCOo0cwjKRdBhGrip_ltIJZg-APT60tUPA.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\nD/tmp/artifacts/dnsns-dhEp186udEF6X6chZus-RJzWRmzlccxx1_btlXWayVI.jar\022@761129d7ceae74417a5fa72166eb3e449cd6466ce571cc71d7f6ed95759ac952"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload: "\n5dnsns-dhEp186udEF6X6chZus-RJzWRmzlccxx1_btlXWayVI.jar"
>
dependencies: <
type_urn: "beam:artifact:type:file:v1"
type_payload:
"\n\203\001/tmp/artifacts/beam-sdks-java-extensions-schemaio-expansion-service-2.40.0-SNAPSHOT-3sK0GbbO1QiM7sXM1MNMyfWBx7dEJmkLZWULAbiFVEg.jar\022@dec2b419b6ced5088ceec5ccd4c34cc9f581c7b74426690b65650b01b8855448"
role_urn: "beam:artifact:role:staging_to:v1"
role_payload:
"\ntbeam-sdks-java-extensions-schemaio-expansion-service-2.40.0-SNAPSHOT-3sK0GbbO1QiM7sXM1MNMyfWBx7dEJmkLZWULAbiFVEg.jar"
>
>
>
environments: <
key: "go"
value: <
urn: "beam:env:docker:v1"
payload: "\n\026apache/beam_go_sdk:dev"
capabilities: "beam:protocol:progress_reporting:v0"
capabilities: "beam:protocol:multi_core_bundle_processing:v1"
capabilities: "beam:version:sdk_base:go"
capabilities: "beam:coder:bytes:v1"
capabilities: "beam:coder:bool:v1"
capabilities: "beam:coder:varint:v1"
capabilities: "beam:coder:double:v1"
capabilities: "beam:coder:string_utf8:v1"
capabilities: "beam:coder:length_prefix:v1"
capabilities: "beam:coder:kv:v1"
capabilities: "beam:coder:iterable:v1"
capabilities: "beam:coder:state_backed_iterable:v1"
capabilities: "beam:coder:windowed_value:v1"
capabilities: "beam:coder:global_window:v1"
capabilities: "beam:coder:interval_window:v1"
capabilities: "beam:coder:row:v1"
capabilities: "beam:coder:nullable:v1"
dependencies: <
type_urn: "beam:artifact:type:file:v1"
role_urn: "beam:artifact:role:go_worker_binary:v1"
>
>
>
>
root_transform_ids: "e2"
root_transform_ids: "e3"
root_transform_ids: "s1"
root_transform_ids: "s3"
requirements: "beam:requirement:pardo:splittable_dofn:v1"
2022/05/16 20:52:42 Cross-compiling
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/test/integration/io/xlang/jdbc/jdbc_test.go>
as /tmp/worker-4-1652734362298260641
2022/05/16 20:52:53 Prepared job with id:
go-testjdbcio_postgresreadwrite_9bf14949-cd7b-4f5a-bc42-72d78edf24b7 and
staging token:
go-testjdbcio_postgresreadwrite_9bf14949-cd7b-4f5a-bc42-72d78edf24b7
2022/05/16 20:52:54 Staged binary artifact with token:
2022/05/16 20:52:54 Submitted job:
go0testjdbcio0postgresreadwrite-jenkins-0516205254-a6b87381_be0c8e51-2d7b-4544-aa5e-4400fbf3537b
2022/05/16 20:52:54 Job state: STOPPED
2022/05/16 20:52:54 Job state: STARTING
2022/05/16 20:52:54 Job state: RUNNING
2022/05/16 20:54:18 Job state: DONE
2022/05/16 20:54:18 Warning: 6 errors during metrics processing: [failed to
deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"
type:"beam:metrics:sum_int64:v1" payload:"\x01" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"}
failed to deduce Step from MonitoringInfo:
urn:"beam:metric:sampled_byte_size:v1"
type:"beam:metrics:distribution_int64:v1"
payload:"\x01\xe7\x0c\xe7\x0c\xe7\x0c" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"}
failed to deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"
type:"beam:metrics:sum_int64:v1" payload:"\x01" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"}
failed to deduce Step from MonitoringInfo: urn:"beam:metric:element_count:v1"
type:"beam:metrics:sum_int64:v1" payload:"\x01" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/PairWithRestriction0"}
failed to deduce Step from MonitoringInfo:
urn:"beam:metric:sampled_byte_size:v1"
type:"beam:metrics:distribution_int64:v1"
payload:"\x01\xe7\x0c\xe7\x0c\xe7\x0c" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/SplitAndSize0"}
failed to deduce Step from MonitoringInfo:
urn:"beam:metric:sampled_byte_size:v1"
type:"beam:metrics:distribution_int64:v1"
payload:"\x01\xda\x0c\xda\x0c\xda\x0c" labels:{key:"PCOLLECTION"
value:"EzkhoYmfMkExternal/JdbcIO.ReadRows/JdbcIO.Read/Create.Values/Read(CreateSource)/ParDo(OutputSingleSource)/ParMultiDo(OutputSingleSource).output/PairWithRestriction0"}]
--- PASS: TestJDBCIO_PostgresReadWrite (153.59s)
PASS
ok      github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/jdbc  392.963s
=== RUN TestKafkaIO_BasicReadWrite
--- FAIL: TestKafkaIO_BasicReadWrite (0.01s)
panic: tried cross-language for beam:transform:org.apache.beam:kafka_write:v1
against localhost:33497 and failed
expanding external transform
expanding transform with ExpansionRequest:
components:{pcollections:{key:"n4" value:{unique_name:"n4"
coder_id:"c1@UctRLXDvjn" is_bounded:BOUNDED
windowing_strategy_id:"w0@UctRLXDvjn"}}
windowing_strategies:{key:"w0@UctRLXDvjn"
value:{window_fn:{urn:"beam:window_fn:global_windows:v1"}
merge_status:NON_MERGING window_coder_id:"c2@UctRLXDvjn" trigger:{default:{}}
accumulation_mode:DISCARDING output_time:END_OF_WINDOW
closing_behavior:EMIT_IF_NONEMPTY on_time_behavior:FIRE_IF_NONEMPTY
environment_id:"go"}} coders:{key:"c0@UctRLXDvjn"
value:{spec:{urn:"beam:coder:bytes:v1"}}} coders:{key:"c1@UctRLXDvjn"
value:{spec:{urn:"beam:coder:kv:v1"} component_coder_ids:"c0@UctRLXDvjn"
component_coder_ids:"c0@UctRLXDvjn"}} coders:{key:"c2@UctRLXDvjn"
value:{spec:{urn:"beam:coder:global_window:v1"}}} environments:{key:"go"
value:{}}} transform:{unique_name:"External"
spec:{urn:"beam:transform:org.apache.beam:kafka_write:v1"
payload:"\n}\n\x1c\n\x0eProducerConfig\x1a\n*\x08\n\x02\x10\x07\x12\x02\x10\x07\n\x0b\n\x05Topic\x1a\x02\x10\x07\n\x13\n\rKeySerializer\x1a\x02\x10\x07\n\x15\n\x0fValueSerializer\x1a\x02\x10\x07\x12$d97fbd8d-dde2-46d8-babd-f5372b6ab5e6\x12\xda\x01\x04\x00\x00\x00\x00\x01\x11bootstrap.servers\x0flocalhost:33985=xlang_kafkaio_basic_test_ef21fd96-4e8e-4a51-9bf5-99677bfed3a49org.apache.kafka.common.serialization.ByteArraySerializer9org.apache.kafka.common.serialization.ByteArraySerializer"}
inputs:{key:"i0" value:"n4"} environment_id:"go"} namespace:"UctRLXDvjn"
expansion failed
caused by:
rpc error: code = Unavailable desc = connection error: desc = "transport: Error
while dialing dial tcp 127.0.0.1:33497: connect: connection refused" [recovered]
panic: tried cross-language for
beam:transform:org.apache.beam:kafka_write:v1 against localhost:33497 and failed
expanding external transform
expanding transform with ExpansionRequest:
components:{pcollections:{key:"n4" value:{unique_name:"n4"
coder_id:"c1@UctRLXDvjn" is_bounded:BOUNDED
windowing_strategy_id:"w0@UctRLXDvjn"}}
windowing_strategies:{key:"w0@UctRLXDvjn"
value:{window_fn:{urn:"beam:window_fn:global_windows:v1"}
merge_status:NON_MERGING window_coder_id:"c2@UctRLXDvjn" trigger:{default:{}}
accumulation_mode:DISCARDING output_time:END_OF_WINDOW
closing_behavior:EMIT_IF_NONEMPTY on_time_behavior:FIRE_IF_NONEMPTY
environment_id:"go"}} coders:{key:"c0@UctRLXDvjn"
value:{spec:{urn:"beam:coder:bytes:v1"}}} coders:{key:"c1@UctRLXDvjn"
value:{spec:{urn:"beam:coder:kv:v1"} component_coder_ids:"c0@UctRLXDvjn"
component_coder_ids:"c0@UctRLXDvjn"}} coders:{key:"c2@UctRLXDvjn"
value:{spec:{urn:"beam:coder:global_window:v1"}}} environments:{key:"go"
value:{}}} transform:{unique_name:"External"
spec:{urn:"beam:transform:org.apache.beam:kafka_write:v1"
payload:"\n}\n\x1c\n\x0eProducerConfig\x1a\n*\x08\n\x02\x10\x07\x12\x02\x10\x07\n\x0b\n\x05Topic\x1a\x02\x10\x07\n\x13\n\rKeySerializer\x1a\x02\x10\x07\n\x15\n\x0fValueSerializer\x1a\x02\x10\x07\x12$d97fbd8d-dde2-46d8-babd-f5372b6ab5e6\x12\xda\x01\x04\x00\x00\x00\x00\x01\x11bootstrap.servers\x0flocalhost:33985=xlang_kafkaio_basic_test_ef21fd96-4e8e-4a51-9bf5-99677bfed3a49org.apache.kafka.common.serialization.ByteArraySerializer9org.apache.kafka.common.serialization.ByteArraySerializer"}
inputs:{key:"i0" value:"n4"} environment_id:"go"} namespace:"UctRLXDvjn"
expansion failed
caused by:
rpc error: code = Unavailable desc = connection error: desc = "transport: Error
while dialing dial tcp 127.0.0.1:33497: connect: connection refused"
goroutine 21 [running]:
testing.tRunner.func1.2({0xca3820, 0xc00049e300})
/home/jenkins/sdk/go1.18.1/src/testing/testing.go:1389 +0x24e
testing.tRunner.func1()
/home/jenkins/sdk/go1.18.1/src/testing/testing.go:1392 +0x39f
panic({0xca3820, 0xc00049e300})
/home/jenkins/sdk/go1.18.1/src/runtime/panic.go:838 +0x207
github.com/apache/beam/sdks/v2/go/pkg/beam.CrossLanguage({0xc0004a6300?,
0xc0004dc370?}, {0xdc9e9e, 0x2d}, {0xc00013e6e0?, 0x0?, 0x5?}, {0xc0003594e0,
0xf}, 0xc000175c60, ...)
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/pkg/beam/xlang.go>:162
+0x136
github.com/apache/beam/sdks/v2/go/pkg/beam/io/xlang/kafkaio.Write({0xc0004a6ae0?,
0xc0004dc370?}, {0xc0003594e0, 0xf}, {0xc0003594a0, 0xf}, {0xc000143c80,
0x3d}, {0xc000175e60?}, {0x0, ...})
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/pkg/beam/io/xlang/kafkaio/kafka.go>:278
+0x4ad
github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/kafka.writeInts({0xc0004a6ac0?,
0xc0004dc370?}, {0xc0003594e0, 0xf}, {0xc0003594a0, 0xf}, {0xc000143c80,
0x3d}, {0xc0004e8000, 0x3e8, ...})
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/test/integration/io/xlang/kafka/kafka.go>:48
+0x24b
github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/kafka.WritePipeline({0xc0003594e0,
0xf}, {0xc0003594a0, 0xf}, {0xc000143c80, 0x3d}, {0xc0004e8000, 0x3e8, 0x3e8})
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/test/integration/io/xlang/kafka/kafka.go>:70
+0x157
github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/kafka.TestKafkaIO_BasicReadWrite(0xc0004b44e0)
<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/test/integration/io/xlang/kafka/kafka_test.go>:61
+0xed
testing.tRunner(0xc0004b44e0, 0xdfe4f8)
/home/jenkins/sdk/go1.18.1/src/testing/testing.go:1439 +0x102
created by testing.(*T).Run
/home/jenkins/sdk/go1.18.1/src/testing/testing.go:1486 +0x35f
FAIL    github.com/apache/beam/sdks/v2/go/test/integration/io/xlang/kafka  6.155s
FAIL
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerGoUsingJava FAILED
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerJavaUsingJava
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerJavaUsingPython
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerJavaUsingPythonOnly
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingJava
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerPythonUsingPython
> Task :runners:spark:3:job-server:validatesCrossLanguageRunnerCleanup
> Task :runners:spark:3:job-server:sparkJobServerCleanup
FAILURE: Build completed with 2 failures.
1: Task failed with an exception.
-----------
* What went wrong:
Execution failed for task ':sdks:java:io:google-cloud-platform:compileJava'.
> Failed to store cache entry for task ':sdks:java:io:google-cloud-platform:compileJava'
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
2: Task failed with an exception.
-----------
* Where:
Build file '<https://ci-beam.apache.org/job/beam_PostCommit_XVR_Spark3/ws/src/sdks/go/test/build.gradle>' line: 195
* What went wrong:
Execution failed for task ':runners:spark:3:job-server:validatesCrossLanguageRunnerGoUsingJava'.
> Process 'command 'sh'' finished with non-zero exit value 1
* Try:
> Run with --stacktrace option to get the stack trace.
> Run with --info or --debug option to get more log output.
> Run with --scan to get full insights.
==============================================================================
* Get more help at https://help.gradle.org
Deprecated Gradle features were used in this build, making it incompatible with
Gradle 8.0.
You can use '--warning-mode all' to show the individual deprecation warnings
and determine if they come from your own scripts or plugins.
See
https://docs.gradle.org/7.4/userguide/command_line_interface.html#sec:command_line_warnings
Execution optimizations have been disabled for 1 invalid unit(s) of work during
this build to ensure correctness.
Please consult deprecation warnings for more details.
BUILD FAILED in 52m 46s
241 actionable tasks: 185 executed, 45 from cache, 11 up-to-date
Publishing build scan...
https://gradle.com/s/i3srbhbbysnnm
Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure