This is an automated email from the ASF dual-hosted git repository.

pabloem pushed a commit to branch pabloem-patch-2
in repository https://gitbox.apache.org/repos/asf/beam.git

commit e1571c36828b0ec75b6eeb517513b616cfb3890c
Author: Pablo <[email protected]>
AuthorDate: Tue Mar 30 11:49:29 2021 -0700

    Documenting temp_dataset parameter in ReadFromBigQuery
---
 sdks/python/apache_beam/io/gcp/bigquery.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sdks/python/apache_beam/io/gcp/bigquery.py b/sdks/python/apache_beam/io/gcp/bigquery.py
index 3097d02..ae2d59e 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery.py
@@ -1911,6 +1911,10 @@ class ReadFromBigQuery(PTransform):
       To learn more about type conversions between BigQuery and Avro, see:
       https://cloud.google.com/bigquery/docs/loading-data-cloud-storage-avro\
               #avro_conversions
+    temp_dataset (``google.cloud.bigquery.dataset.DatasetReference``):
+        The dataset in which to create temporary tables when performing file
+        loads. By default, a new dataset is created in the execution project for
+        temporary tables.
    """
 
   COUNTER = 0

Reply via email to