satybald commented on a change in pull request #15602:
URL: https://github.com/apache/beam/pull/15602#discussion_r718904743
##########
File path: sdks/python/apache_beam/io/gcp/bigquery.py
##########
@@ -887,6 +899,380 @@ def _export_files(self, bq):
return table.schema, metadata_list
+class _CustomBigQueryStorageSource(BoundedSource):
+ """A base class for BoundedSource implementations which read from BigQuery
+ using the BigQuery Storage API.
+ Args:
+ table (str, TableReference): The ID of the table. The ID must contain only
+     letters ``a-z``, ``A-Z``, numbers ``0-9``, or underscores ``_``. If
+ **dataset** argument is :data:`None` then the table argument must
+ contain the entire table reference specified as:
+ ``'PROJECT:DATASET.TABLE'`` or must specify a TableReference.
+ dataset (str): Optional ID of the dataset containing this table or
+ :data:`None` if the table argument specifies a TableReference.
+ project (str): Optional ID of the project containing this table or
+ :data:`None` if the table argument specifies a TableReference.
+ selected_fields (List[str]): Optional List of names of the fields in the
+ table that should be read. If empty, all fields will be read. If the
+     specified field is a nested field, all the sub-fields in the field will be
+ selected. The output field order is unrelated to the order of fields in
+ selected_fields.
+ row_restriction (str): Optional SQL text filtering statement, similar to a
+ WHERE clause in a query. Aggregates are not supported. Restricted to a
+     maximum length of 1 MB.
+ use_native_datetime (bool): If :data:`True`, BigQuery DATETIME fields will
+ be returned as native Python datetime objects. If :data:`False`,
+ DATETIME fields will be returned as formatted strings (for example:
+ 2021-01-01T12:59:59). The default is :data:`False`.
+ """
+
+ # The maximum number of streams which will be requested when creating a read
+ # session, regardless of the desired bundle size.
+ MAX_SPLIT_COUNT = 10000
+ # The minimum number of streams which will be requested when creating a read
+ # session, regardless of the desired bundle size. Note that the server may
+ # still choose to return fewer than ten streams based on the layout of the
+ # table.
+ MIN_SPLIT_COUNT = 10
+
+ def __init__(
+ self,
+ table: Optional[Union[str, TableReference]] = None,
+ dataset: Optional[str] = None,
+ project: Optional[str] = None,
+ query: Optional[str] = None,
Review comment:
Thank you for adding `query` parameter :100: This will make RFBQ with
Storage API so much more useful
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]