See <https://ci-beam.apache.org/job/beam_PostCommit_Python38/1134/display/redirect?page=changes>
Changes:

[Boyuan Zhang] [BEAM-12193] Add user metrics to show founded TopicPartition
[yifanmai] Update annotations in ValidateRunner tests for pack_combiners
[yifanmai] Run pack_combiners phase by default in DataflowRunner
[yifanmai] Fix tests
[yifanmai] Fix tests
[yifanmai] Lint
[yifanmai] Lint
[suztomo] [BEAM-8357] Upgrading auto-value to 1.8.1 from 1.8
[mackenzieclark] Fixing counter names from PR https://github.com/apache/beam/pull/14233
[noreply] [BEAM-12204] Allow unboundedly large side inputs in portable Java.
[Boyuan Zhang] [BEAM-12192] Have WatchKafkaTopicPartitionDoFn respects topics given by
[noreply] [BEAM-10937] Tour of Beam use FileSystems for I/O (#14431)

------------------------------------------
[...truncated 42.66 MB...]
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "out",
          "step_name": "s1"
        },
        "serialized_fn": "ref_AppliedPTransform_ReadFromPubSub-Map-_from_proto_str-_4",
        "user_name": "ReadFromPubSub/Map(_from_proto_str)"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s3",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "add_attribute"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                  "component_encodings": [
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    },
                    {
                      "@type": "FastPrimitivesCoder$eNprYJmaxgABPZKJBYnJGanxSamJuXrJ+SmpRcVQakqPsFticUlAUWZuZklmWWqxM0h4yuQpmo1Taqf08MenJebkJCUmZ8eD1U/JYOjhDshMzs5JRVGYVJykBwDOUCqY",
                      "component_encodings": [],
                      "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                    }
                  ],
                  "is_pair_like": true,
                  "pipeline_proto_coder_id": "ref_Coder_FastPrimitivesCoder_3"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "add_attribute.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s2"
        },
        "serialized_fn": "ref_AppliedPTransform_add_attribute_5",
        "user_name": "add_attribute"
      }
    },
    {
      "kind": "ParallelDo",
      "name": "s4",
      "properties": {
        "display_data": [
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.ParDo",
            "shortValue": "CallableWrapperDoFn",
            "type": "STRING",
            "value": "apache_beam.transforms.core.CallableWrapperDoFn"
          },
          {
            "key": "fn",
            "label": "Transform Function",
            "namespace": "apache_beam.transforms.core.CallableWrapperDoFn",
            "type": "STRING",
            "value": "message_to_proto_str"
          }
        ],
        "non_parallel_inputs": {},
        "output_info": [
          {
            "encoding": {
              "@type": "kind:windowed_value",
              "component_encodings": [
                {
                  "@type": "kind:bytes"
                },
                {
                  "@type": "kind:global_window"
                }
              ],
              "is_wrapper": true
            },
            "output_name": "None",
            "user_name": "WriteToPubSub/ToProtobuf.out"
          }
        ],
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s3"
        },
        "serialized_fn": "ref_AppliedPTransform_WriteToPubSub-ToProtobuf_7",
        "user_name": "WriteToPubSub/ToProtobuf"
      }
    },
    {
      "kind": "ParallelWrite",
      "name": "s5",
      "properties": {
        "display_data": [],
        "encoding": {
          "@type": "kind:windowed_value",
          "component_encodings": [
            {
              "@type": "kind:bytes"
            },
            {
              "@type": "kind:global_window"
            }
          ],
          "is_wrapper": true
        },
        "format": "pubsub",
        "parallel_input": {
          "@type": "OutputReference",
          "output_name": "None",
          "step_name": "s4"
        },
        "pubsub_id_label": "id",
        "pubsub_serialized_attributes_fn": "",
        "pubsub_timestamp_label": "timestamp",
        "pubsub_topic": "projects/apache-beam-testing/topics/psit_topic_outputb8e51dc9-f8d1-4e19-8633-a0da5d1293f6",
        "user_name": "WriteToPubSub/Write/NativeWrite"
      }
    }
  ],
  "type": "JOB_TYPE_STREAMING"
}
apache_beam.runners.dataflow.internal.apiclient: INFO: Create job: <Job
 createTime: '2021-04-24T01:13:29.386600Z'
 currentStateTime: '1970-01-01T00:00:00Z'
 id: '2021-04-23_18_13_28-11371076525255784469'
 location: 'us-central1'
 name: 'beamapp-jenkins-0424011321-372224'
 projectId: 'apache-beam-testing'
 stageStates: []
 startTime: '2021-04-24T01:13:29.386600Z'
 steps: []
 tempFiles: []
 type: TypeValueValuesEnum(JOB_TYPE_STREAMING, 2)>
apache_beam.runners.dataflow.internal.apiclient: INFO: Created job with id: [2021-04-23_18_13_28-11371076525255784469]
apache_beam.runners.dataflow.internal.apiclient: INFO: Submitted job: 2021-04-23_18_13_28-11371076525255784469
apache_beam.runners.dataflow.internal.apiclient: INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow/jobs/us-central1/2021-04-23_18_13_28-11371076525255784469?project=apache-beam-testing
apache_beam.runners.dataflow.dataflow_runner: INFO: Job 2021-04-23_18_13_28-11371076525255784469 is in state JOB_STATE_RUNNING
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:33.131Z: JOB_MESSAGE_BASIC: Worker configuration: n1-standard-4 in us-central1-f.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.570Z: JOB_MESSAGE_DETAILED: Expanding SplittableParDo operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.593Z: JOB_MESSAGE_DETAILED: Expanding CollectionToSingleton operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.641Z: JOB_MESSAGE_DETAILED: Expanding CoGroupByKey operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.668Z: JOB_MESSAGE_DETAILED: Expanding SplittableProcessKeyed operations into optimizable parts.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.690Z: JOB_MESSAGE_DETAILED: Expanding GroupByKey operations into streaming Read/Write steps
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.709Z: JOB_MESSAGE_DEBUG: Annotating graph with Autotuner information.
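[Editor's note] For readers mapping the job graph above back to user code: the following is a minimal sketch, not the actual integration-test source. It assumes a pipeline shaped like the steps named in the graph (ReadFromPubSub, add_attribute, WriteToPubSub); INPUT_SUBSCRIPTION, OUTPUT_TOPIC, and the attribute written by add_attribute are placeholders invented for illustration.

    # Sketch only: reconstructs the step names in the job graph above.
    import apache_beam as beam
    from apache_beam.io.gcp.pubsub import PubsubMessage, ReadFromPubSub, WriteToPubSub
    from apache_beam.options.pipeline_options import PipelineOptions

    INPUT_SUBSCRIPTION = 'projects/<project>/subscriptions/<input-sub>'  # placeholder
    OUTPUT_TOPIC = 'projects/<project>/topics/<output-topic>'            # placeholder

    def add_attribute(msg: PubsubMessage) -> PubsubMessage:
        # Mirrors the 'add_attribute' ParDo (step s3): pass each message
        # through with one extra attribute (the attribute name is invented).
        attrs = dict(msg.attributes)
        attrs['processed'] = 'true'
        return PubsubMessage(msg.data, attrs)

    # streaming=True matches "type": "JOB_TYPE_STREAMING" in the graph.
    with beam.Pipeline(options=PipelineOptions(streaming=True)) as p:
        (p
         | ReadFromPubSub(subscription=INPUT_SUBSCRIPTION, with_attributes=True)
         | 'add_attribute' >> beam.Map(add_attribute)
         | WriteToPubSub(OUTPUT_TOPIC, with_attributes=True))

The Map(_from_proto_str) and WriteToPubSub/ToProtobuf (message_to_proto_str) steps in the graph are conversion steps that ReadFromPubSub and WriteToPubSub insert internally; they do not appear in user code.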
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.736Z: JOB_MESSAGE_DETAILED: Fusing adjacent ParDo, Read, Write, and Flatten operations
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.759Z: JOB_MESSAGE_DETAILED: Fusing consumer ReadFromPubSub/Map(_from_proto_str) into ReadFromPubSub/Read
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.779Z: JOB_MESSAGE_DETAILED: Fusing consumer add_attribute into ReadFromPubSub/Map(_from_proto_str)
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.802Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/ToProtobuf into add_attribute
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.825Z: JOB_MESSAGE_DETAILED: Fusing consumer WriteToPubSub/Write/NativeWrite into WriteToPubSub/ToProtobuf
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.857Z: JOB_MESSAGE_BASIC: The pubsub read for: projects/apache-beam-testing/subscriptions/psit_subscription_inputb8e51dc9-f8d1-4e19-8633-a0da5d1293f6 is configured to compute input data watermarks based on custom timestamp attribute timestamp. Cloud Dataflow has created an additional tracking subscription to do this, which will be cleaned up automatically. For details, see: https://cloud.google.com/dataflow/model/pubsub-io#timestamps-ids
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.879Z: JOB_MESSAGE_DEBUG: Workflow config is missing a default resource spec.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.907Z: JOB_MESSAGE_DEBUG: Adding StepResource setup and teardown to workflow graph.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.932Z: JOB_MESSAGE_DEBUG: Adding workflow start and stop steps.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:34.950Z: JOB_MESSAGE_DEBUG: Assigning stage ids.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:35.254Z: JOB_MESSAGE_DEBUG: Executing wait step start19
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:36.487Z: JOB_MESSAGE_DETAILED: Pub/Sub resources set up for topic 'projects/apache-beam-testing/topics/psit_topic_inputb8e51dc9-f8d1-4e19-8633-a0da5d1293f6'.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:36.532Z: JOB_MESSAGE_BASIC: Executing operation ReadFromPubSub/Read+ReadFromPubSub/Map(_from_proto_str)+add_attribute+WriteToPubSub/ToProtobuf+WriteToPubSub/Write/NativeWrite
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:36.562Z: JOB_MESSAGE_DEBUG: Starting worker pool setup.
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:13:36.576Z: JOB_MESSAGE_BASIC: Starting 1 workers in us-central1-f...
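[Editor's note] The "tracking subscription" message above is emitted when the read is configured with a custom timestamp attribute. Below is a minimal sketch of the ReadFromPubSub configuration that would produce it, assuming attribute names matching the pubsub_id_label ("id") and pubsub_timestamp_label ("timestamp") values in the job graph; the subscription path is a placeholder.

    # Sketch only: opting into attribute-based IDs and watermarks.
    from apache_beam.io.gcp.pubsub import ReadFromPubSub

    read = ReadFromPubSub(
        subscription='projects/<project>/subscriptions/<input-sub>',  # placeholder
        with_attributes=True,
        id_label='id',                     # matches pubsub_id_label in the graph
        timestamp_attribute='timestamp')   # matches pubsub_timestamp_label

With timestamp_attribute set, Dataflow derives watermarks from that attribute rather than from publish time, which is why it creates the extra tracking subscription mentioned in the log.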
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:14:05.872Z: JOB_MESSAGE_BASIC: Your project already contains 100 Dataflow-created metric descriptors, so new user metrics of the form custom.googleapis.com/* will not be created. However, all user metrics are also available in the metric dataflow.googleapis.com/job/user_counter. If you rely on the custom metrics, you can delete old / unused metric descriptors. See https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.list and https://developers.google.com/apis-explorer/#p/monitoring/v3/monitoring.projects.metricDescriptors.delete
apache_beam.runners.dataflow.dataflow_runner: INFO: 2021-04-24T01:16:08.112Z: JOB_MESSAGE_DETAILED: Autoscaling: Raised the number of workers to 1 so that the pipeline can catch up with its backlog and keep up with its input rate.
apache_beam.runners.dataflow.dataflow_runner: WARNING: Timing out on waiting for job 2021-04-23_18_13_28-11371076525255784469 after 182 seconds
google.auth._default: DEBUG: Checking None for explicit credentials as part of auth process...
google.auth._default: DEBUG: Checking Cloud SDK credentials as part of auth process...
google.auth._default: DEBUG: Cloud SDK credentials not found on disk; not using them
google.auth._default: DEBUG: Checking for App Engine runtime as part of auth process...
google.auth._default: DEBUG: No App Engine library was found so cannot authentication via App Engine Identity Credentials.
google.auth.transport._http_client: DEBUG: Making request: GET http://169.254.169.254
google.auth.transport._http_client: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/project/project-id
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/?recursive=true
urllib3.connectionpool: DEBUG: Starting new HTTP connection (1): metadata.google.internal:80
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/default/?recursive=true HTTP/1.1" 200 144
google.auth.transport.requests: DEBUG: Making request: GET http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub
urllib3.connectionpool: DEBUG: http://metadata.google.internal:80 "GET /computeMetadata/v1/instance/service-accounts/[email protected]/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform%2Chttps%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpubsub HTTP/1.1" 200 244
apache_beam.io.gcp.tests.pubsub_matcher: ERROR: Timeout after 300 sec. Received 0 messages from projects/apache-beam-testing/subscriptions/psit_subscription_outputb8e51dc9-f8d1-4e19-8633-a0da5d1293f6.
--------------------- >> end captured logging << ---------------------
----------------------------------------------------------------------
XML: nosetests-postCommitIT-df-py38.xml
----------------------------------------------------------------------
XML: <https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/nosetests.xml>
----------------------------------------------------------------------
Ran 69 tests in 5049.645s

FAILED (SKIP=6, failures=1)

> Task :sdks:python:test-suites:dataflow:py38:postCommitIT FAILED

FAILURE: Build failed with an exception.

* Where:
Script '<https://ci-beam.apache.org/job/beam_PostCommit_Python38/ws/src/sdks/python/test-suites/dataflow/common.gradle'> line: 118

* What went wrong:
Execution failed for task ':sdks:python:test-suites:dataflow:py38:postCommitIT'.
> Process 'command 'sh'' finished with non-zero exit value 1

* Try:
Run with --stacktrace option to get the stack trace. Run with --info or --debug option to get more log output. Run with --scan to get full insights.

* Get more help at https://help.gradle.org
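[Editor's note] The failing assertion above comes from apache_beam.io.gcp.tests.pubsub_matcher, which polls the output subscription until the expected messages arrive or a timeout (300 seconds here) expires; receiving 0 messages is what failed the test and, in turn, the Gradle task. Below is a minimal sketch of that verification pattern. PubSubMessageMatcher is the real class, but the exact arguments this test passes are not shown in the log; the project, subscription, and expected payload are placeholders.

    # Sketch only: how a test typically asserts on Pub/Sub output.
    from hamcrest import assert_that
    from apache_beam.io.gcp.tests.pubsub_matcher import PubSubMessageMatcher

    matcher = PubSubMessageMatcher(
        project='<project>',                                        # placeholder
        sub_name='projects/<project>/subscriptions/<output-sub>',   # placeholder
        expected_msg=[b'expected payload'],  # hypothetical expected messages
        timeout=300)                         # seconds; matches the log above
    # The matcher pulls from the subscription itself; the value it is
    # matched against is ignored.
    assert_that(None, matcher)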
Deprecated Gradle features were used in this build, making it incompatible with Gradle 7.0.
Use '--warning-mode all' to show the individual deprecation warnings.
See https://docs.gradle.org/6.8.3/userguide/command_line_interface.html#sec:command_line_warnings

BUILD FAILED in 1h 39m 11s

208 actionable tasks: 191 executed, 13 from cache, 4 up-to-date

Gradle was unable to watch the file system for changes. The inotify watches limit is too low.

Publishing build scan...
https://gradle.com/s/vrtop5zsjcqc2

Build step 'Invoke Gradle script' changed build result to FAILURE
Build step 'Invoke Gradle script' marked build as failure

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
