This is an automated email from the ASF dual-hosted git repository.

ahmedabualsaud pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 2f788f674f3 use standard sql (#33278)
2f788f674f3 is described below

commit 2f788f674f377f857748c761abe98397038a8979
Author: Ahmed Abualsaud <[email protected]>
AuthorDate: Wed Dec 4 11:46:47 2024 -0500

    use standard sql (#33278)
---
 .../providers/BigQueryDirectReadSchemaTransformProvider.java   |  2 +-
 .../beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java  | 10 ++++++----
 2 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryDirectReadSchemaTransformProvider.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryDirectReadSchemaTransformProvider.java
index 15b1b01d7f6..073de40038b 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryDirectReadSchemaTransformProvider.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryDirectReadSchemaTransformProvider.java
@@ -233,7 +233,7 @@ public class BigQueryDirectReadSchemaTransformProvider
           read = read.withSelectedFields(configuration.getSelectedFields());
         }
       } else {
-        read = read.fromQuery(configuration.getQuery());
+        read = read.fromQuery(configuration.getQuery()).usingStandardSql();
       }
       if (!Strings.isNullOrEmpty(configuration.getKmsKey())) {
         read = read.withKmsKey(configuration.getKmsKey());
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java
index 16ce2f049dc..6a422f1832d 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/providers/BigQueryManagedIT.java
@@ -96,8 +96,8 @@ public class BigQueryManagedIT {
   @Test
   public void testBatchFileLoadsWriteRead() {
     String table =
-        String.format("%s:%s.%s", PROJECT, BIG_QUERY_DATASET_ID, testName.getMethodName());
-    Map<String, Object> config = ImmutableMap.of("table", table);
+        String.format("%s.%s.%s", PROJECT, BIG_QUERY_DATASET_ID, testName.getMethodName());
+    Map<String, Object> writeConfig = ImmutableMap.of("table", table);
 
     // file loads requires a GCS temp location
    String tempLocation = writePipeline.getOptions().as(TestPipelineOptions.class).getTempRoot();
@@ -105,13 +105,15 @@ public class BigQueryManagedIT {
 
     // batch write
     PCollectionRowTuple.of("input", getInput(writePipeline, false))
-        .apply(Managed.write(Managed.BIGQUERY).withConfig(config));
+        .apply(Managed.write(Managed.BIGQUERY).withConfig(writeConfig));
     writePipeline.run().waitUntilFinish();
 
+    Map<String, Object> readConfig =
+        ImmutableMap.of("query", String.format("SELECT * FROM `%s`", table));
     // read and validate
     PCollection<Row> outputRows =
         readPipeline
-            .apply(Managed.read(Managed.BIGQUERY).withConfig(config))
+            .apply(Managed.read(Managed.BIGQUERY).withConfig(readConfig))
             .getSinglePCollection();
     PAssert.that(outputRows).containsInAnyOrder(ROWS);
     readPipeline.run().waitUntilFinish();

Reply via email to the commits mailing list.