This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 8fbf466d50 Resolving google bigquery deprecated warnings (#39841)
8fbf466d50 is described below
commit 8fbf466d505fc3c584b56e3d354baf19eae94a74
Author: Gopal Dirisala <[email protected]>
AuthorDate: Sun May 26 19:21:37 2024 +0530
Resolving google bigquery deprecated warnings (#39841)
* Resolving google bigquery deprecated warnings
* Resolving google bigquery deprecated warnings
---
tests/always/test_example_dags.py | 1 -
tests/always/test_project_structure.py | 2 ++
.../providers/google/cloud/bigquery/example_bigquery_sensors.py | 6 ++----
3 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/tests/always/test_example_dags.py b/tests/always/test_example_dags.py
index fab3bdb7c8..7d2ec685f8 100644
--- a/tests/always/test_example_dags.py
+++ b/tests/always/test_example_dags.py
@@ -49,7 +49,6 @@ IGNORE_AIRFLOW_PROVIDER_DEPRECATION_WARNING: tuple[str, ...] = (
# and a corresponding Issue should be created on GitHub.
"tests/system/providers/amazon/aws/example_emr_notebook_execution.py",
"tests/system/providers/google/cloud/bigquery/example_bigquery_operations.py",
- "tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py",
"tests/system/providers/google/cloud/dataproc/example_dataproc_gke.py",
"tests/system/providers/google/cloud/gcs/example_gcs_sensor.py",
"tests/system/providers/google/cloud/gcs/example_gcs_to_gcs.py",
diff --git a/tests/always/test_project_structure.py b/tests/always/test_project_structure.py
index 11af794670..f382f0f5ae 100644
--- a/tests/always/test_project_structure.py
+++ b/tests/always/test_project_structure.py
@@ -389,6 +389,8 @@ class TestGoogleProviderProjectStructure(ExampleCoverageTest, AssetsCoverageTest
"airflow.providers.google.cloud.operators.bigquery.BigQueryPatchDatasetOperator",
"airflow.providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator",
"airflow.providers.google.cloud.operators.bigquery.BigQueryExecuteQueryOperator",
+ "airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistenceAsyncSensor",
+ "airflow.providers.google.cloud.sensors.bigquery.BigQueryTableExistencePartitionAsyncSensor",
"airflow.providers.google.cloud.sensors.cloud_composer.CloudComposerEnvironmentSensor",
"airflow.providers.google.marketing_platform.operators.GoogleDisplayVideo360CreateQueryOperator",
"airflow.providers.google.marketing_platform.operators.GoogleDisplayVideo360RunQueryOperator",
diff --git a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py b/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py
index a780615e36..662e491e02 100644
--- a/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py
+++ b/tests/system/providers/google/cloud/bigquery/example_bigquery_sensors.py
@@ -32,8 +32,6 @@ from airflow.providers.google.cloud.operators.bigquery import (
BigQueryInsertJobOperator,
)
from airflow.providers.google.cloud.sensors.bigquery import (
- BigQueryTableExistenceAsyncSensor,
- BigQueryTableExistencePartitionAsyncSensor,
BigQueryTableExistenceSensor,
BigQueryTablePartitionExistenceSensor,
)
@@ -98,7 +96,7 @@ with DAG(
# [END howto_sensor_bigquery_table_defered]
# [START howto_sensor_async_bigquery_table]
- check_table_exists_async = BigQueryTableExistenceAsyncSensor(
+ check_table_exists_async = BigQueryTableExistenceSensor(
task_id="check_table_exists_async",
project_id=PROJECT_ID,
dataset_id=DATASET_NAME,
@@ -138,7 +136,7 @@ with DAG(
# [END howto_sensor_bigquery_table_partition_defered]
# [START howto_sensor_bigquery_table_partition_async]
- check_table_partition_exists_async = BigQueryTableExistencePartitionAsyncSensor(
+ check_table_partition_exists_async = BigQueryTablePartitionExistenceSensor(
task_id="check_table_partition_exists_async",
partition_id=PARTITION_NAME,
project_id=PROJECT_ID,