emkornfield commented on a change in pull request #15185:
URL: https://github.com/apache/beam/pull/15185#discussion_r679456138
##########
File path: sdks/python/apache_beam/io/gcp/bigquery.py
##########
@@ -1920,45 +2135,59 @@ def __init__(self, gcs_location=None, *args, **kwargs):
       gcs_location = StaticValueProvider(str, gcs_location)
     self.gcs_location = gcs_location
-
     self._args = args
     self._kwargs = kwargs

   def expand(self, pcoll):
     # TODO(BEAM-11115): Make ReadFromBQ rely on ReadAllFromBQ implementation.
-    temp_location = pcoll.pipeline.options.view_as(
-        GoogleCloudOptions).temp_location
-    job_name = pcoll.pipeline.options.view_as(GoogleCloudOptions).job_name
-    gcs_location_vp = self.gcs_location
-    unique_id = str(uuid.uuid4())[0:10]
-
-    def file_path_to_remove(unused_elm):
-      gcs_location = bigquery_export_destination_uri(
-          gcs_location_vp, temp_location, unique_id, True)
-      return gcs_location + '/'
-
-    files_to_remove_pcoll = beam.pvalue.AsList(
-        pcoll.pipeline
-        | 'FilesToRemoveImpulse' >> beam.Create([None])
-        | 'MapFilesToRemove' >> beam.Map(file_path_to_remove))
+    if self.method is ReadFromBigQuery.Method.EXPORT:
Review comment:
It would probably make this diff easier to understand if you renamed
this method (to _expand_export) and created a new expand method that switches
between the two (and a separate new method for expanding the direct read).
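
For illustration, a rough sketch of the structure being suggested (the dispatch
logic and the helper name _expand_direct_read are assumptions for this example,
not code from the PR):

    # Sketch only: helper names and dispatch details are assumptions,
    # not the PR's actual code.
    def expand(self, pcoll):
      # Route to the appropriate expansion based on the configured read method.
      if self.method is ReadFromBigQuery.Method.EXPORT:
        return self._expand_export(pcoll)
      return self._expand_direct_read(pcoll)

    def _expand_export(self, pcoll):
      # Existing export-based logic (the body currently living in expand()).
      ...

    def _expand_direct_read(self, pcoll):
      # New BigQuery Storage Read API path added in this PR.
      ...

That way the export path shows up as unchanged in the diff and the new
direct-read path reads as a self-contained addition.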