ahmedabu98 commented on code in PR #23012:
URL: https://github.com/apache/beam/pull/23012#discussion_r967563357
##########
sdks/python/apache_beam/io/gcp/bigquery_file_loads.py:
##########
@@ -430,20 +434,36 @@ def process(self, element, schema_mod_job_name_prefix):
table_reference)
# Trigger potential schema modification by loading zero rows into the
# destination table with the temporary table schema.
- schema_update_job_reference = self._bq_wrapper.perform_load_job(
- destination=table_reference,
- source_stream=io.BytesIO(), # file with zero rows
- job_id=job_name,
- schema=temp_table_schema,
- write_disposition='WRITE_APPEND',
- create_disposition='CREATE_NEVER',
- additional_load_parameters=additional_parameters,
- job_labels=self._bq_io_metadata.add_additional_bq_job_labels(),
- # JSON format is hardcoded because zero rows load(unlike AVRO) and
- # a nested schema(unlike CSV, which a default one) is permitted.
- source_format="NEWLINE_DELIMITED_JSON",
- load_job_project_id=self._load_job_project_id)
- yield (destination, schema_update_job_reference)
+ schema_update_job_reference = self.bq_wrapper.perform_load_job(
+ destination=table_reference,
+ source_stream=io.BytesIO(), # file with zero rows
+ job_id=job_name,
+ schema=temp_table_schema,
+ write_disposition='WRITE_APPEND',
+ create_disposition='CREATE_NEVER',
+ additional_load_parameters=additional_parameters,
+ job_labels=self._bq_io_metadata.add_additional_bq_job_labels(),
+ # JSON format is hardcoded because zero rows load(unlike AVRO) and
+ # a nested schema(unlike CSV, which a default one) is permitted.
+ source_format="NEWLINE_DELIMITED_JSON",
+ load_job_project_id=self._load_job_project_id)
+ self.pending_jobs.append(
+ GlobalWindows.windowed_value(
+ (destination, schema_update_job_reference)))
+
+ def finish_bundle(self):
+ # Unlike the other steps, schema update is not always necessary.
+ # In that case, return a None value to avoid blocking in streaming context.
+ # Otherwise, the streaming pipeline would get stuck waiting for the
+ # TriggerCopyJobs side-input.
+ if not self.pending_jobs:
+ return [GlobalWindows.windowed_value(None)]
+
+ for windowed_value in self.pending_jobs:
+ job_ref = windowed_value.value[1]
Review Comment:
Hmmm I'm not sure I follow, what stands out as weird to you?
FYI, the Java implementation is somewhat like this. Jobs are submitted and
accumulated while they are pending. Then they are [looped over and waited for in
finishBundle](https://github.com/apache/beam/blob/master/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/WriteTables.java#L344).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]