[
https://issues.apache.org/jira/browse/BEAM-5630?focusedWorklogId=153002&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-153002
]
ASF GitHub Bot logged work on BEAM-5630:
----------------------------------------
Author: ASF GitHub Bot
Created on: 10/Oct/18 00:46
Start Date: 10/Oct/18 00:46
Worklog Time Spent: 10m
Work Description: jasonkuster closed pull request #6559: [BEAM-5630] add
more tests into BigQueryIOReadIT
URL: https://github.com/apache/beam/pull/6559
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:
As this is a foreign pull request (from a fork), the diff is supplied
below (as it won't show otherwise due to GitHub magic):
diff --git a/sdks/java/io/google-cloud-platform/build.gradle
b/sdks/java/io/google-cloud-platform/build.gradle
index 7ee0d66c41a..4e8c5fb8cce 100644
--- a/sdks/java/io/google-cloud-platform/build.gradle
+++ b/sdks/java/io/google-cloud-platform/build.gradle
@@ -97,6 +97,7 @@ task integrationTest(type: Test) {
outputs.upToDateWhen { false }
include '**/*IT.class'
+ exclude '**/BigQueryIOReadIT.class'
exclude '**/BigQueryToTableIT.class'
maxParallelForks 4
classpath = sourceSets.test.runtimeClasspath
diff --git
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
index 61141efd8a1..637a03b9944 100644
---
a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
+++
b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOReadIT.java
@@ -18,11 +18,13 @@
package org.apache.beam.sdk.io.gcp.bigquery;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.extensions.gcp.options.GcpOptions;
import org.apache.beam.sdk.options.Description;
+import org.apache.beam.sdk.options.ExperimentalOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.options.Validation;
import org.apache.beam.sdk.testing.PAssert;
@@ -45,10 +47,15 @@
private String project;
private static final String datasetId = "big_query_import_export";
private static final String tablePrefix = "export_";
- private static final Map<String, Integer> numOfRecords =
ImmutableMap.of("empty", 0, "1K", 1000);
+ private static final Map<String, Long> numOfRecords =
+ ImmutableMap.<String, Long>of(
+ "empty", 0L,
+ "1M", 10592L,
+ "1G", 11110839L,
+ "1T", 11110839000L);
/** Customized PipelineOption for BigQueryIORead Pipeline. */
- public interface BigQueryIOReadOptions extends TestPipelineOptions {
+ public interface BigQueryIOReadOptions extends TestPipelineOptions,
ExperimentalOptions {
@Description("The table to be read")
@Validation.Required
String getInputTable();
@@ -62,13 +69,17 @@
void setNumRecords(long numRecords);
}
- private void setupTestEnvironment(String recordSize) {
+ private void setupTestEnvironment(String recordSize, boolean
enableCustomBigquery) {
PipelineOptionsFactory.register(BigQueryIOReadOptions.class);
options =
TestPipeline.testingPipelineOptions().as(BigQueryIOReadOptions.class);
options.setNumRecords(numOfRecords.get(recordSize));
options.setTempLocation(options.getTempRoot() + "/temp-it/");
project =
TestPipeline.testingPipelineOptions().as(GcpOptions.class).getProject();
options.setInputTable(project + ":" + datasetId + "." + tablePrefix +
recordSize);
+ if (enableCustomBigquery) {
+ options.setExperiments(
+ ImmutableList.of("enable_custom_bigquery_sink",
"enable_custom_bigquery_source"));
+ }
}
private void runBigQueryIOReadPipeline() {
@@ -83,13 +94,37 @@ private void runBigQueryIOReadPipeline() {
@Test
public void testBigQueryReadEmpty() throws Exception {
- setupTestEnvironment("empty");
+ setupTestEnvironment("empty", false);
runBigQueryIOReadPipeline();
}
@Test
- public void testBigQueryRead1K() throws Exception {
- setupTestEnvironment("1K");
+ public void testBigQueryRead1M() throws Exception {
+ setupTestEnvironment("1M", false);
+ runBigQueryIOReadPipeline();
+ }
+
+ @Test
+ public void testBigQueryRead1G() throws Exception {
+ setupTestEnvironment("1G", false);
+ runBigQueryIOReadPipeline();
+ }
+
+ @Test
+ public void testBigQueryRead1T() throws Exception {
+ setupTestEnvironment("1T", false);
+ runBigQueryIOReadPipeline();
+ }
+
+ @Test
+ public void testBigQueryReadEmptyCustom() throws Exception {
+ setupTestEnvironment("empty", true);
+ runBigQueryIOReadPipeline();
+ }
+
+ @Test
+ public void testBigQueryRead1TCustom() throws Exception {
+ setupTestEnvironment("1T", true);
runBigQueryIOReadPipeline();
}
}
diff --git a/sdks/python/apache_beam/io/gcp/bigquery_io_read_it_test.py
b/sdks/python/apache_beam/io/gcp/bigquery_io_read_it_test.py
index b9b3b41c324..8851a143971 100644
--- a/sdks/python/apache_beam/io/gcp/bigquery_io_read_it_test.py
+++ b/sdks/python/apache_beam/io/gcp/bigquery_io_read_it_test.py
@@ -38,7 +38,10 @@ class BigqueryIOReadIT(unittest.TestCase):
DEFAULT_DATASET = "big_query_import_export"
DEFAULT_TABLE_PREFIX = "export_"
- NUM_RECORDS = {"1K": 1000,}
+ NUM_RECORDS = {"empty": 0,
+ "1M": 10592,
+ "1G": 11110839,
+ "1T": 11110839000,}
def run_bigquery_io_read_pipeline(self, input_size):
test_pipeline = TestPipeline(is_integration_test=True)
@@ -51,8 +54,8 @@ def run_bigquery_io_read_pipeline(self, input_size):
**extra_opts))
@attr('IT')
- def test_1K_table(self):
- self.run_bigquery_io_read_pipeline('1K')
+ def bigquery_read_1M_python(self):
+ self.run_bigquery_io_read_pipeline('1M')
if __name__ == '__main__':
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
Issue Time Tracking
-------------------
Worklog Id: (was: 153002)
Time Spent: 3h 50m (was: 3h 40m)
> supplement Bigquery Read IT test cases and blacklist them in post-commit
> ------------------------------------------------------------------------
>
> Key: BEAM-5630
> URL: https://issues.apache.org/jira/browse/BEAM-5630
> Project: Beam
> Issue Type: Bug
> Components: testing
> Reporter: yifan zou
> Assignee: yifan zou
> Priority: Major
> Time Spent: 3h 50m
> Remaining Estimate: 0h
>
--
This message was sent by Atlassian JIRA
(v7.6.3#76005)