This is an automated email from the ASF dual-hosted git repository.
potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 5007806 Add possibility to run DAGs from system tests and see DAGs logs (#17868)
5007806 is described below
commit 500780651cfef9254d5e365c0de6f8c7af6d05bf
Author: deedmitrij <[email protected]>
AuthorDate: Tue Aug 31 21:14:29 2021 +0300
Add possibility to run DAGs from system tests and see DAGs logs (#17868)
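
    Example DAGs previously used schedule_interval=None, which tells the
    scheduler never to create a DAG run, so system tests had no run to execute
    or collect logs from. Setting schedule_interval='@once' schedules exactly
    one run, which a system test can execute end to end. A minimal sketch of
    the DAG-side pattern (the task itself is illustrative, not taken from this
    commit; the real example DAGs use Google provider operators):

        from airflow import models
        from airflow.operators.bash import BashOperator
        from airflow.utils.dates import days_ago

        with models.DAG(
            "example_minimal",
            schedule_interval='@once',  # one scheduled run, executable by a system test
            start_date=days_ago(1),
            tags=['example'],
        ) as dag:
            # illustrative placeholder task
            hello = BashOperator(task_id="hello", bash_command="echo hello")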
---
.../google/cloud/example_dags/example_automl_tables.py | 8 ++++----
.../cloud/example_dags/example_azure_fileshare_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_bigquery_dts.py | 2 +-
.../google/cloud/example_dags/example_bigquery_operations.py | 6 +++---
.../google/cloud/example_dags/example_bigquery_queries.py | 2 +-
.../google/cloud/example_dags/example_bigquery_sensors.py | 2 +-
.../cloud/example_dags/example_bigquery_to_bigquery.py | 2 +-
.../providers/google/cloud/example_dags/example_bigtable.py | 2 +-
.../google/cloud/example_dags/example_cloud_build.py | 2 +-
.../google/cloud/example_dags/example_cloud_memorystore.py | 4 ++--
.../providers/google/cloud/example_dags/example_cloud_sql.py | 2 +-
.../google/cloud/example_dags/example_cloud_sql_query.py | 2 +-
.../example_cloud_storage_transfer_service_gcp.py | 2 +-
.../providers/google/cloud/example_dags/example_compute.py | 2 +-
.../google/cloud/example_dags/example_compute_igm.py | 2 +-
.../google/cloud/example_dags/example_compute_ssh.py | 2 +-
.../google/cloud/example_dags/example_datacatalog.py | 2 +-
.../providers/google/cloud/example_dags/example_dataflow.py | 8 ++++----
.../cloud/example_dags/example_dataflow_flex_template.py | 2 +-
.../google/cloud/example_dags/example_dataflow_sql.py | 2 +-
.../google/cloud/example_dags/example_datafusion.py | 2 +-
.../providers/google/cloud/example_dags/example_dataprep.py | 2 +-
.../providers/google/cloud/example_dags/example_dataproc.py | 2 +-
.../providers/google/cloud/example_dags/example_datastore.py | 4 ++--
airflow/providers/google/cloud/example_dags/example_dlp.py | 8 ++++----
.../google/cloud/example_dags/example_facebook_ads_to_gcs.py | 2 +-
.../providers/google/cloud/example_dags/example_functions.py | 2 +-
airflow/providers/google/cloud/example_dags/example_gcs.py | 4 ++--
.../example_dags/example_gcs_timespan_file_transform.py | 2 +-
.../google/cloud/example_dags/example_gcs_to_bigquery.py | 2 +-
.../google/cloud/example_dags/example_gcs_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_gcs_to_local.py | 2 +-
.../google/cloud/example_dags/example_gcs_to_sftp.py | 2 +-
.../google/cloud/example_dags/example_gdrive_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_kubernetes_engine.py | 2 +-
.../google/cloud/example_dags/example_life_sciences.py | 2 +-
.../google/cloud/example_dags/example_local_to_gcs.py | 2 +-
.../providers/google/cloud/example_dags/example_mlengine.py | 2 +-
.../google/cloud/example_dags/example_mysql_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_natural_language.py | 2 +-
.../google/cloud/example_dags/example_postgres_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_presto_to_gcs.py | 2 +-
.../providers/google/cloud/example_dags/example_pubsub.py | 4 ++--
.../providers/google/cloud/example_dags/example_s3_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_salesforce_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_sftp_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_sheets_to_gcs.py | 2 +-
.../providers/google/cloud/example_dags/example_spanner.py | 2 +-
.../google/cloud/example_dags/example_speech_to_text.py | 2 +-
.../google/cloud/example_dags/example_stackdriver.py | 2 +-
airflow/providers/google/cloud/example_dags/example_tasks.py | 2 +-
.../google/cloud/example_dags/example_text_to_speech.py | 2 +-
.../providers/google/cloud/example_dags/example_translate.py | 2 +-
.../google/cloud/example_dags/example_translate_speech.py | 2 +-
.../google/cloud/example_dags/example_trino_to_gcs.py | 2 +-
.../google/cloud/example_dags/example_video_intelligence.py | 2 +-
.../providers/google/cloud/example_dags/example_vision.py | 6 +++---
.../providers/google/cloud/example_dags/example_workflows.py | 2 +-
.../google/firebase/example_dags/example_firestore.py | 2 +-
.../providers/google/leveldb/example_dags/example_leveldb.py | 2 +-
.../marketing_platform/example_dags/example_analytics.py | 2 +-
.../example_dags/example_campaign_manager.py | 2 +-
.../marketing_platform/example_dags/example_display_video.py | 6 +++---
.../marketing_platform/example_dags/example_search_ads.py | 2 +-
.../google/suite/example_dags/example_gcs_to_sheets.py | 2 +-
.../providers/google/suite/example_dags/example_sheets.py | 2 +-
tests/providers/google/cloud/hooks/test_bigquery_system.py | 4 ++++
tests/providers/google/cloud/hooks/test_kms_system.py | 6 ++++++
.../google/cloud/hooks/test_secret_manager_system.py | 6 ++++++
.../google/cloud/log/test_gcs_task_handler_system.py | 2 ++
.../google/cloud/log/test_stackdriver_task_handler_system.py | 11 ++++++++---
tests/providers/google/cloud/operators/test_automl_system.py | 12 ++++++++++++
.../providers/google/cloud/operators/test_bigtable_system.py | 3 +++
.../google/cloud/operators/test_cloud_sql_system.py | 9 +++++++++
.../operators/test_cloud_storage_transfer_service_system.py | 6 ++++++
.../google/cloud/operators/test_datacatalog_system.py | 6 ++++++
.../providers/google/cloud/operators/test_dataflow_system.py | 4 ++++
.../google/cloud/operators/test_datafusion_system.py | 2 ++
.../providers/google/cloud/operators/test_dataprep_system.py | 1 +
tests/providers/google/cloud/operators/test_dlp_system.py | 6 ++++++
.../google/cloud/operators/test_functions_system.py | 6 ++++++
.../google/cloud/operators/test_kubernetes_engine_system.py | 6 ++++++
.../google/cloud/operators/test_natural_language_system.py | 6 ++++++
tests/providers/google/cloud/operators/test_pubsub_system.py | 6 ++++++
.../providers/google/cloud/operators/test_spanner_system.py | 3 +++
.../google/cloud/operators/test_stackdriver_system.py | 6 ++++++
tests/providers/google/cloud/operators/test_tasks_system.py | 6 ++++++
.../google/cloud/operators/test_translate_system.py | 6 ++++++
.../google/cloud/operators/test_video_intelligence_system.py | 2 +-
.../google/cloud/operators/test_workflows_system.py | 6 ++++++
.../google/cloud/secrets/test_secret_manager_system.py | 4 ++++
.../cloud/transfers/test_azure_fileshare_to_gcs_system.py | 6 ++++++
.../cloud/transfers/test_bigquery_to_bigquery_system.py | 6 ++++++
.../cloud/transfers/test_facebook_ads_to_gcs_system.py | 6 ++++++
.../google/cloud/transfers/test_gcs_to_bigquery_system.py | 6 ++++++
.../google/cloud/transfers/test_salesforce_to_gcs_system.py | 6 ++++++
.../google/leveldb/operators/test_leveldb_system.py | 6 ++++++
.../marketing_platform/operators/test_analytics_system.py | 6 ++++++
tests/providers/google/suite/operators/test_sheets_system.py | 6 ++++++
99 files changed, 264 insertions(+), 89 deletions(-)
diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
index 9107715..acdf70f 100644
--- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py
+++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py
@@ -84,7 +84,7 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str:
# Example DAG to create dataset, train model_id and deploy it.
with models.DAG(
"example_create_and_deploy",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
user_defined_macros={
"get_target_column_spec": get_target_column_spec,
@@ -196,7 +196,7 @@ with models.DAG(
# Example DAG for AutoML datasets operations
with models.DAG(
"example_automl_dataset",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
user_defined_macros={"extract_object_id": extract_object_id},
) as example_dag:
@@ -264,7 +264,7 @@ with models.DAG(
with models.DAG(
"example_gcp_get_deploy",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as get_deploy_dag:
@@ -289,7 +289,7 @@ with models.DAG(
with models.DAG(
"example_gcp_predict",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as predict_dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py
index aa49c86..ef6fc64 100644
--- a/airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_azure_fileshare_to_gcs.py
@@ -36,7 +36,7 @@ with DAG(
'retries': 1,
'retry_delay': timedelta(minutes=5),
},
- schedule_interval=None,
+ schedule_interval='@once',
start_date=datetime(2018, 11, 1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
index c6991af..cbe63c1 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py
@@ -64,7 +64,7 @@ TRANSFER_CONFIG = {
with models.DAG(
"example_gcp_bigquery_dts",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
index e3e96fe..f813d9b 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_operations.py
@@ -57,7 +57,7 @@ DATA_SAMPLE_GCS_OBJECT_NAME = DATA_SAMPLE_GCS_URL_PARTS.path[1:]
with models.DAG(
"example_bigquery_operations",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
@@ -238,10 +238,10 @@ with models.DAG(
with models.DAG(
"example_bigquery_operations_location",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
-):
+) as dag_with_location:
create_dataset_with_location = BigQueryCreateEmptyDatasetOperator(
task_id="create_dataset_with_location",
dataset_id=LOCATION_DATASET_NAME,
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py b/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py
index 06946fa..8c1e3c5 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_queries.py
@@ -65,7 +65,7 @@ for location in [None, LOCATION]:
with models.DAG(
dag_id,
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
user_defined_macros={"DATASET": DATASET_NAME, "TABLE": TABLE_1},
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py b/airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py
index cde6cd9..dcb3dfd 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_sensors.py
@@ -54,7 +54,7 @@ dag_id = "example_bigquery_sensors"
with models.DAG(
dag_id,
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
user_defined_macros={"DATASET": DATASET_NAME, "TABLE": TABLE_NAME},
diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
index 6cd4c64..7aceceb 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigquery_to_bigquery.py
@@ -37,7 +37,7 @@ TARGET = "target"
with models.DAG(
"example_bigquery_to_bigquery",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_bigtable.py b/airflow/providers/google/cloud/example_dags/example_bigtable.py
index 2f3cfd0..53cf840 100644
--- a/airflow/providers/google/cloud/example_dags/example_bigtable.py
+++ b/airflow/providers/google/cloud/example_dags/example_bigtable.py
@@ -80,7 +80,7 @@ CBT_POKE_INTERVAL = getenv('GCP_BIG_TABLE_POKE_INTERVAL', '60')
with models.DAG(
'example_gcp_bigtable_operators',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_build.py b/airflow/providers/google/cloud/example_dags/example_cloud_build.py
index 1591fe0..7fb4680 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_build.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_build.py
@@ -77,7 +77,7 @@ create_build_from_repo_body = {
with models.DAG(
"example_gcp_cloud_build",
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
# [START howto_operator_create_build_from_storage]
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
index 1b75e34..f2dbe8c 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py
@@ -79,7 +79,7 @@ MEMCACHED_INSTANCE = {"name": "", "node_count": 1, "node_config": {"cpu_count":
with models.DAG(
"gcp_cloud_memorystore_redis",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
tags=['example'],
) as dag:
@@ -255,7 +255,7 @@ with models.DAG(
with models.DAG(
"gcp_cloud_memorystore_memcached",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
tags=['example'],
) as dag_memcache:
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
index 39a8f2e..d0ce25c 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql.py
@@ -138,7 +138,7 @@ db_patch_body = {"charset": "utf16", "collation": "utf16_general_ci"}
with models.DAG(
'example_gcp_sql',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
index 4d40500..c0c26f5 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_sql_query.py
@@ -268,7 +268,7 @@ tasks = []
with models.DAG(
dag_id='example_gcp_sql_query',
- schedule_interval=None,
+ schedule_interval='@once',
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py
index 8e851df..f80f067 100644
--- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py
+++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py
@@ -101,7 +101,7 @@ update_body = {
with models.DAG(
"example_gcp_transfer",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_compute.py b/airflow/providers/google/cloud/example_dags/example_compute.py
index d2b336e..820576f 100644
--- a/airflow/providers/google/cloud/example_dags/example_compute.py
+++ b/airflow/providers/google/cloud/example_dags/example_compute.py
@@ -51,7 +51,7 @@ GCE_SHORT_MACHINE_TYPE_NAME = os.environ.get('GCE_SHORT_MACHINE_TYPE_NAME', 'n1-
with models.DAG(
'example_gcp_compute',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_compute_igm.py b/airflow/providers/google/cloud/example_dags/example_compute_igm.py
index ac3322c..0370d8a 100644
--- a/airflow/providers/google/cloud/example_dags/example_compute_igm.py
+++ b/airflow/providers/google/cloud/example_dags/example_compute_igm.py
@@ -91,7 +91,7 @@ UPDATE_POLICY = {
with models.DAG(
'example_gcp_compute_igm',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_compute_ssh.py b/airflow/providers/google/cloud/example_dags/example_compute_ssh.py
index 10aaa59..62636fc 100644
--- a/airflow/providers/google/cloud/example_dags/example_compute_ssh.py
+++ b/airflow/providers/google/cloud/example_dags/example_compute_ssh.py
@@ -31,7 +31,7 @@ GCE_INSTANCE = os.environ.get('GCE_INSTANCE', 'target-instance')
with models.DAG(
'example_compute_ssh',
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag:
# # [START howto_execute_command_on_remote1]
diff --git a/airflow/providers/google/cloud/example_dags/example_datacatalog.py b/airflow/providers/google/cloud/example_dags/example_datacatalog.py
index 3457805..cd9b66d 100644
--- a/airflow/providers/google/cloud/example_dags/example_datacatalog.py
+++ b/airflow/providers/google/cloud/example_dags/example_datacatalog.py
@@ -58,7 +58,7 @@ FIELD_NAME_1 = "first"
FIELD_NAME_2 = "second"
FIELD_NAME_3 = "first-rename"
-with models.DAG("example_gcp_datacatalog", start_date=days_ago(1),
schedule_interval=None) as dag:
+with models.DAG("example_gcp_datacatalog", schedule_interval='@once',
start_date=days_ago(1)) as dag:
# Create
# [START howto_operator_gcp_datacatalog_create_entry_group]
create_entry_group = CloudDataCatalogCreateEntryGroupOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow.py b/airflow/providers/google/cloud/example_dags/example_dataflow.py
index 1761cba..531ea5a 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataflow.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataflow.py
@@ -62,7 +62,7 @@ default_args = {
with models.DAG(
"example_gcp_dataflow_native_java",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag_native_java:
@@ -111,7 +111,7 @@ with models.DAG(
"example_gcp_dataflow_native_python",
default_args=default_args,
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag_native_python:
@@ -146,7 +146,7 @@ with models.DAG(
"example_gcp_dataflow_native_python_async",
default_args=default_args,
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag_native_python_async:
# [START howto_operator_start_python_job_async]
@@ -239,7 +239,7 @@ with models.DAG(
"example_gcp_dataflow_template",
default_args=default_args,
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag_template:
# [START howto_operator_start_template_job]
diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow_flex_template.py b/airflow/providers/google/cloud/example_dags/example_dataflow_flex_template.py
index d67550c..3938af8 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataflow_flex_template.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataflow_flex_template.py
@@ -46,7 +46,7 @@ BQ_FLEX_TEMPLATE_LOCATION = os.environ.get('GCP_DATAFLOW_BQ_FLEX_TEMPLATE_LOCATI
with models.DAG(
dag_id="example_gcp_dataflow_flex_template_java",
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
) as dag_flex_template:
# [START howto_operator_start_template_job]
start_flex_template = DataflowStartFlexTemplateOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_dataflow_sql.py b/airflow/providers/google/cloud/example_dags/example_dataflow_sql.py
index 8bf1ecf..0d03119 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataflow_sql.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataflow_sql.py
@@ -37,7 +37,7 @@ DATAFLOW_SQL_LOCATION = os.environ.get("GCP_DATAFLOW_SQL_LOCATION", "us-west1")
with models.DAG(
dag_id="example_gcp_dataflow_sql",
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag_sql:
# [START howto_operator_start_sql_job]
diff --git a/airflow/providers/google/cloud/example_dags/example_datafusion.py b/airflow/providers/google/cloud/example_dags/example_datafusion.py
index 70fc60a..8b5398f 100644
--- a/airflow/providers/google/cloud/example_dags/example_datafusion.py
+++ b/airflow/providers/google/cloud/example_dags/example_datafusion.py
@@ -147,7 +147,7 @@ PIPELINE = {
with models.DAG(
"example_data_fusion",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
) as dag:
# [START howto_cloud_data_fusion_create_instance_operator]
diff --git a/airflow/providers/google/cloud/example_dags/example_dataprep.py b/airflow/providers/google/cloud/example_dags/example_dataprep.py
index b3b6540..b155681 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataprep.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataprep.py
@@ -52,7 +52,7 @@ DATA = {
with models.DAG(
"example_dataprep",
- schedule_interval=None,
+ schedule_interval='@once',
start_date=dates.days_ago(1), # Override to match your needs
) as dag:
# [START how_to_dataprep_run_job_group_operator]
diff --git a/airflow/providers/google/cloud/example_dags/example_dataproc.py b/airflow/providers/google/cloud/example_dags/example_dataproc.py
index 9694eb8..4959498 100644
--- a/airflow/providers/google/cloud/example_dags/example_dataproc.py
+++ b/airflow/providers/google/cloud/example_dags/example_dataproc.py
@@ -151,7 +151,7 @@ WORKFLOW_TEMPLATE = {
}
-with models.DAG("example_gcp_dataproc", start_date=days_ago(1),
schedule_interval=None) as dag:
+with models.DAG("example_gcp_dataproc", schedule_interval='@once',
start_date=days_ago(1)) as dag:
# [START how_to_cloud_dataproc_create_cluster_operator]
create_cluster = DataprocCreateClusterOperator(
task_id="create_cluster",
diff --git a/airflow/providers/google/cloud/example_dags/example_datastore.py b/airflow/providers/google/cloud/example_dags/example_datastore.py
index 3ef605d..8dc1f50 100644
--- a/airflow/providers/google/cloud/example_dags/example_datastore.py
+++ b/airflow/providers/google/cloud/example_dags/example_datastore.py
@@ -42,7 +42,7 @@ BUCKET = os.environ.get("GCP_DATASTORE_BUCKET", "datastore-system-test")
with models.DAG(
"example_gcp_datastore",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
tags=["example"],
) as dag:
@@ -83,7 +83,7 @@ TRANSACTION_OPTIONS: Dict[str, Any] = {"readWrite": {}}
with models.DAG(
"example_gcp_datastore_operations",
start_date=dates.days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=["example"],
) as dag2:
# [START how_to_allocate_ids]
diff --git a/airflow/providers/google/cloud/example_dags/example_dlp.py b/airflow/providers/google/cloud/example_dags/example_dlp.py
index 9a056d1..2199877 100644
--- a/airflow/providers/google/cloud/example_dags/example_dlp.py
+++ b/airflow/providers/google/cloud/example_dags/example_dlp.py
@@ -61,7 +61,7 @@ OBJECT_GCS_OUTPUT_URI = os.path.join(OUTPUT_BUCKET, "tmp", OUTPUT_FILENAME)
with models.DAG(
"example_gcp_dlp",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag1:
@@ -110,7 +110,7 @@ UPDATE_CUSTOM_INFO_TYPE = {
with models.DAG(
"example_gcp_dlp_info_types",
- schedule_interval=None,
+ schedule_interval='@once',
start_date=days_ago(1),
tags=["example", "dlp", "info-types"],
) as dag2:
@@ -152,7 +152,7 @@ JOB_TRIGGER = {
TRIGGER_ID = "example_trigger"
with models.DAG(
- "example_gcp_dlp_job", schedule_interval=None, start_date=days_ago(1),
tags=["example", "dlp_job"]
+ "example_gcp_dlp_job", schedule_interval='@once', start_date=days_ago(1),
tags=["example", "dlp_job"]
) as dag3: # [START howto_operator_dlp_create_job_trigger]
create_trigger = CloudDLPCreateJobTriggerOperator(
project_id=GCP_PROJECT,
@@ -195,7 +195,7 @@ DEIDENTIFY_CONFIG = {
with models.DAG(
"example_gcp_dlp_deidentify_content",
- schedule_interval=None,
+ schedule_interval='@once',
start_date=days_ago(1),
tags=["example", "dlp", "deidentify"],
) as dag4:
diff --git a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
index 920935d..0ffe21c 100644
--- a/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_facebook_ads_to_gcs.py
@@ -56,7 +56,7 @@ PARAMS = {'level': 'ad', 'date_preset': 'yesterday'}
with models.DAG(
"example_facebook_ads_to_gcs",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_functions.py b/airflow/providers/google/cloud/example_dags/example_functions.py
index 3730d37..7c6cdb8 100644
--- a/airflow/providers/google/cloud/example_dags/example_functions.py
+++ b/airflow/providers/google/cloud/example_dags/example_functions.py
@@ -96,7 +96,7 @@ else:
with models.DAG(
'example_gcp_function',
default_args=default_args,
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs.py
index 94bdfbc..684a4b4 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs.py
@@ -60,7 +60,7 @@ BUCKET_FILE_LOCATION = PATH_TO_UPLOAD_FILE.rpartition("/")[-1]
with models.DAG(
"example_gcs",
start_date=days_ago(1),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
create_bucket1 = GCSCreateBucketOperator(
@@ -160,7 +160,7 @@ with models.DAG(
with models.DAG(
"example_gcs_sensors",
start_date=days_ago(1),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag2:
create_bucket = GCSCreateBucketOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_timespan_file_transform.py b/airflow/providers/google/cloud/example_dags/example_gcs_timespan_file_transform.py
index d4e37aa..b4c4332 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_timespan_file_transform.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_timespan_file_transform.py
@@ -41,7 +41,7 @@ PATH_TO_TRANSFORM_SCRIPT = os.environ.get(
with models.DAG(
"example_gcs_timespan_file_transform",
start_date=days_ago(1),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
index 1727d9f..f3c88b5 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_bigquery.py
@@ -36,7 +36,7 @@ TABLE_NAME = os.environ.get("GCP_TABLE_NAME", 'gcs_to_bq_table')
dag = models.DAG(
dag_id='example_gcs_to_bigquery_operator',
start_date=days_ago(2),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
)
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
index d29367e..7086c94 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_gcs.py
@@ -39,7 +39,7 @@ OBJECT_1 = os.environ.get("GCP_GCS_OBJECT_1", "test-gcs-to-gcs-1")
OBJECT_2 = os.environ.get("GCP_GCS_OBJECT_2", "test-gcs-to-gcs-2")
with models.DAG(
- "example_gcs_to_gcs", start_date=days_ago(1), schedule_interval=None,
tags=['example']
+ "example_gcs_to_gcs", schedule_interval='@once', start_date=days_ago(1),
tags=['example']
) as dag:
# [START howto_synch_bucket]
sync_bucket = GCSSynchronizeBucketsOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_local.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_local.py
index c039ed1..16b5afd 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_local.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_local.py
@@ -30,7 +30,7 @@ PATH_TO_LOCAL_FILE = os.environ.get("GCP_GCS_PATH_TO_SAVED_FILE", "test-gcs-exam
with models.DAG(
"example_gcs_to_local",
start_date=days_ago(1),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
# [START howto_operator_gcs_download_file_task]
diff --git a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
index 408070f..c31a8f9 100644
--- a/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
+++ b/airflow/providers/google/cloud/example_dags/example_gcs_to_sftp.py
@@ -37,7 +37,7 @@ DESTINATION_PATH_3 = "/tmp/dest-dir-2/"
with models.DAG(
- "example_gcs_to_sftp", start_date=days_ago(1), schedule_interval=None,
tags=['example']
+ "example_gcs_to_sftp", schedule_interval='@once', start_date=days_ago(1),
tags=['example']
) as dag:
# [START howto_operator_gcs_to_sftp_copy_single_file]
copy_file_from_gcs_to_sftp = GCSToSFTPOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_gdrive_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_gdrive_to_gcs.py
index 974fa66..2059850 100644
--- a/airflow/providers/google/cloud/example_dags/example_gdrive_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_gdrive_to_gcs.py
@@ -31,7 +31,7 @@ FILE_NAME = os.environ.get("FILE_NAME", "file.pdf")
with models.DAG(
"example_gdrive_to_gcs_with_gdrive_sensor",
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=["example"],
) as dag:
# [START detect_file]
diff --git a/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py b/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py
index a096b9c..e05ad33 100644
--- a/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py
+++ b/airflow/providers/google/cloud/example_dags/example_kubernetes_engine.py
@@ -40,7 +40,7 @@ CLUSTER = {"name": CLUSTER_NAME, "initial_node_count": 1}
with models.DAG(
"example_gcp_gke",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_life_sciences.py b/airflow/providers/google/cloud/example_dags/example_life_sciences.py
index 1bd035d..b49b6a6 100644
--- a/airflow/providers/google/cloud/example_dags/example_life_sciences.py
+++ b/airflow/providers/google/cloud/example_dags/example_life_sciences.py
@@ -76,7 +76,7 @@ MULTI_ACTION_PIPELINE = {
with models.DAG(
"example_gcp_life_sciences",
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
index eafad5d..f725db7 100644
--- a/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_local_to_gcs.py
@@ -31,7 +31,7 @@ DESTINATION_FILE_LOCATION = os.environ.get('GCP_GCS_DESTINATION_FILE_LOCATION',
with models.DAG(
'example_local_to_gcs',
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
# [START howto_operator_local_filesystem_to_gcs]
diff --git a/airflow/providers/google/cloud/example_dags/example_mlengine.py b/airflow/providers/google/cloud/example_dags/example_mlengine.py
index 082392c..670dd94 100644
--- a/airflow/providers/google/cloud/example_dags/example_mlengine.py
+++ b/airflow/providers/google/cloud/example_dags/example_mlengine.py
@@ -59,7 +59,7 @@ SUMMARY_STAGING = os.environ.get("GCP_MLENGINE_DATAFLOW_STAGING", "gs://INVALID
with models.DAG(
"example_gcp_mlengine",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
params={"model_name": MODEL_NAME},
diff --git a/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
index cdf97dc..651a6f8 100644
--- a/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_mysql_to_gcs.py
@@ -29,7 +29,7 @@ SQL_QUERY = "SELECT * from test_table"
with models.DAG(
'example_mysql_to_gcs',
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
# [START howto_operator_mysql_to_gcs]
diff --git a/airflow/providers/google/cloud/example_dags/example_natural_language.py b/airflow/providers/google/cloud/example_dags/example_natural_language.py
index 558e929..134bec8 100644
--- a/airflow/providers/google/cloud/example_dags/example_natural_language.py
+++ b/airflow/providers/google/cloud/example_dags/example_natural_language.py
@@ -51,7 +51,7 @@ document_gcs = Document(gcs_content_uri=GCS_CONTENT_URI, type="PLAIN_TEXT")
with models.DAG(
"example_gcp_natural_language",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
index 677bd4c..1ccadd2 100644
--- a/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_postgres_to_gcs.py
@@ -31,7 +31,7 @@ SQL_QUERY = "select * from test_table;"
with models.DAG(
dag_id='example_postgres_to_gcs',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
index cf82a48..e7c434c 100644
--- a/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_presto_to_gcs.py
@@ -48,7 +48,7 @@ def safe_name(s: str) -> str:
with models.DAG(
dag_id="example_presto_to_gcs",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_pubsub.py b/airflow/providers/google/cloud/example_dags/example_pubsub.py
index 0dbf4f4..66a676d 100644
--- a/airflow/providers/google/cloud/example_dags/example_pubsub.py
+++ b/airflow/providers/google/cloud/example_dags/example_pubsub.py
@@ -49,7 +49,7 @@ echo_cmd = """
with models.DAG(
"example_gcp_pubsub_sensor",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
) as example_sensor_dag:
# [START howto_operator_gcp_pubsub_create_topic]
@@ -112,7 +112,7 @@ with models.DAG(
with models.DAG(
"example_gcp_pubsub_operator",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
) as example_operator_dag:
# [START howto_operator_gcp_pubsub_create_topic]
diff --git a/airflow/providers/google/cloud/example_dags/example_s3_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_s3_to_gcs.py
index 94e90a3..118abfb 100644
--- a/airflow/providers/google/cloud/example_dags/example_s3_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_s3_to_gcs.py
@@ -40,7 +40,7 @@ def upload_file():
with models.DAG(
'example_s3_to_gcs',
- schedule_interval=None,
+ schedule_interval='@once',
start_date=days_ago(2),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
index be28864..899fc51 100644
--- a/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_salesforce_to_gcs.py
@@ -44,7 +44,7 @@ SALESFORCE_CONN_ID = os.environ.get("SALESFORCE_CONN_ID", "salesforce_default")
with models.DAG(
"example_salesforce_to_gcs",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
) as dag:
create_bucket = GCSCreateBucketOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
index ec197b9..eceb803 100644
--- a/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_sftp_to_gcs.py
@@ -36,7 +36,7 @@ OBJECT_SRC_2 = "parent-2.bin"
OBJECT_SRC_3 = "parent-3.txt"
-with models.DAG("example_sftp_to_gcs", start_date=days_ago(1),
schedule_interval=None) as dag:
+with models.DAG("example_sftp_to_gcs", schedule_interval='@once',
start_date=days_ago(1)) as dag:
# [START howto_operator_sftp_to_gcs_copy_single_file]
copy_file_from_sftp_to_gcs = SFTPToGCSOperator(
task_id="file-copy-sftp-to-gcs",
diff --git a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
index c09306c..237583f 100644
--- a/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_sheets_to_gcs.py
@@ -28,7 +28,7 @@ SPREADSHEET_ID = os.environ.get("SPREADSHEET_ID", "1234567890qwerty")
with models.DAG(
"example_sheets_to_gcs",
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=["example"],
) as dag:
# [START upload_sheet_to_gcs]
diff --git a/airflow/providers/google/cloud/example_dags/example_spanner.py b/airflow/providers/google/cloud/example_dags/example_spanner.py
index 7d24a04..d929189 100644
--- a/airflow/providers/google/cloud/example_dags/example_spanner.py
+++ b/airflow/providers/google/cloud/example_dags/example_spanner.py
@@ -58,7 +58,7 @@ OPERATION_ID = 'unique_operation_id'
with models.DAG(
'example_gcp_spanner',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py
index f067f9d..c1c572a 100644
--- a/airflow/providers/google/cloud/example_dags/example_speech_to_text.py
+++ b/airflow/providers/google/cloud/example_dags/example_speech_to_text.py
@@ -44,7 +44,7 @@ AUDIO = {"uri": f"gs://{BUCKET_NAME}/{FILENAME}"}
with models.DAG(
"example_gcp_speech_to_text",
start_date=dates.days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag:
text_to_speech_synthesize_task = CloudTextToSpeechSynthesizeOperator(
diff --git a/airflow/providers/google/cloud/example_dags/example_stackdriver.py b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
index 9c418b7..6c60763 100644
--- a/airflow/providers/google/cloud/example_dags/example_stackdriver.py
+++ b/airflow/providers/google/cloud/example_dags/example_stackdriver.py
@@ -116,7 +116,7 @@ TEST_NOTIFICATION_CHANNEL_2 = {
with models.DAG(
'example_stackdriver',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_tasks.py b/airflow/providers/google/cloud/example_dags/example_tasks.py
index 49e21de..99b24a1 100644
--- a/airflow/providers/google/cloud/example_dags/example_tasks.py
+++ b/airflow/providers/google/cloud/example_dags/example_tasks.py
@@ -67,7 +67,7 @@ TASK = {
with models.DAG(
"example_gcp_tasks",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_text_to_speech.py b/airflow/providers/google/cloud/example_dags/example_text_to_speech.py
index 172f0d2..1c865c5 100644
--- a/airflow/providers/google/cloud/example_dags/example_text_to_speech.py
+++ b/airflow/providers/google/cloud/example_dags/example_text_to_speech.py
@@ -38,7 +38,7 @@ AUDIO_CONFIG = {"audio_encoding": "LINEAR16"}
with models.DAG(
"example_gcp_text_to_speech",
start_date=dates.days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_translate.py b/airflow/providers/google/cloud/example_dags/example_translate.py
index b62d7f0..6acaa68 100644
--- a/airflow/providers/google/cloud/example_dags/example_translate.py
+++ b/airflow/providers/google/cloud/example_dags/example_translate.py
@@ -29,7 +29,7 @@ from airflow.utils.dates import days_ago
with models.DAG(
'example_gcp_translate',
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_translate_speech.py b/airflow/providers/google/cloud/example_dags/example_translate_speech.py
index 9548d18..c9dc27e 100644
--- a/airflow/providers/google/cloud/example_dags/example_translate_speech.py
+++ b/airflow/providers/google/cloud/example_dags/example_translate_speech.py
@@ -48,7 +48,7 @@ SOURCE_LANGUAGE = None # type: None
with models.DAG(
"example_gcp_translate_speech",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=dates.days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
index 0d8ef02..b47e86f 100644
--- a/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+++ b/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
@@ -48,7 +48,7 @@ def safe_name(s: str) -> str:
with models.DAG(
dag_id="example_trino_to_gcs",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=["example"],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py
index 67f98e5..3b5b576 100644
--- a/airflow/providers/google/cloud/example_dags/example_video_intelligence.py
+++ b/airflow/providers/google/cloud/example_dags/example_video_intelligence.py
@@ -49,7 +49,7 @@ INPUT_URI = f"gs://{GCP_BUCKET_NAME}/video.mp4"
with models.DAG(
"example_gcp_video_intelligence",
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
start_date=days_ago(1),
tags=['example'],
) as dag:
diff --git a/airflow/providers/google/cloud/example_dags/example_vision.py b/airflow/providers/google/cloud/example_dags/example_vision.py
index 5f352ef..cbaefdd 100644
--- a/airflow/providers/google/cloud/example_dags/example_vision.py
+++ b/airflow/providers/google/cloud/example_dags/example_vision.py
@@ -114,7 +114,7 @@ DETECT_IMAGE = {"source": {"image_uri": GCP_VISION_ANNOTATE_IMAGE_URL}}
# [END howto_operator_vision_detect_image_param]
with models.DAG(
- 'example_gcp_vision_autogenerated_id', start_date=days_ago(1), schedule_interval=None
+ 'example_gcp_vision_autogenerated_id', schedule_interval='@once', start_date=days_ago(1)
) as dag_autogenerated_id:
# ################################## #
# ### Autogenerated IDs examples ### #
@@ -269,7 +269,7 @@ with models.DAG(
with models.DAG(
- 'example_gcp_vision_explicit_id', start_date=days_ago(1), schedule_interval=None
+ 'example_gcp_vision_explicit_id', schedule_interval='@once', start_date=days_ago(1)
) as dag_explicit_id:
# ############################# #
# ### Explicit IDs examples ### #
@@ -434,7 +434,7 @@ with models.DAG(
remove_product_from_product_set_2 >> product_delete_2
with models.DAG(
- 'example_gcp_vision_annotate_image', start_date=days_ago(1), schedule_interval=None
+ 'example_gcp_vision_annotate_image', schedule_interval='@once', start_date=days_ago(1)
) as dag_annotate_image:
# ############################## #
# ### Annotate image example ### #
diff --git a/airflow/providers/google/cloud/example_dags/example_workflows.py b/airflow/providers/google/cloud/example_dags/example_workflows.py
index 35b5b01..4f3b66f 100644
--- a/airflow/providers/google/cloud/example_dags/example_workflows.py
+++ b/airflow/providers/google/cloud/example_dags/example_workflows.py
@@ -80,7 +80,7 @@ SLEEP_WORKFLOW = {
}
-with DAG("example_cloud_workflows", start_date=days_ago(1),
schedule_interval=None) as dag:
+with DAG("example_cloud_workflows", schedule_interval='@once',
start_date=days_ago(1)) as dag:
# [START how_to_create_workflow]
create_workflow = WorkflowsCreateWorkflowOperator(
task_id="create_workflow",
diff --git a/airflow/providers/google/firebase/example_dags/example_firestore.py b/airflow/providers/google/firebase/example_dags/example_firestore.py
index 041b266..cde6a1c 100644
--- a/airflow/providers/google/firebase/example_dags/example_firestore.py
+++ b/airflow/providers/google/firebase/example_dags/example_firestore.py
@@ -73,7 +73,7 @@ if BUCKET_NAME is None:
with models.DAG(
"example_google_firestore",
default_args=dict(start_date=dates.days_ago(1)),
- schedule_interval=None,
+ schedule_interval='@once',
tags=["example"],
) as dag:
# [START howto_operator_export_database_to_gcs]
diff --git a/airflow/providers/google/leveldb/example_dags/example_leveldb.py b/airflow/providers/google/leveldb/example_dags/example_leveldb.py
index e4a732c..f430620 100644
--- a/airflow/providers/google/leveldb/example_dags/example_leveldb.py
+++ b/airflow/providers/google/leveldb/example_dags/example_leveldb.py
@@ -26,7 +26,7 @@ from airflow.utils.dates import days_ago
with models.DAG(
'example_leveldb',
start_date=days_ago(2),
- schedule_interval=None,
+ schedule_interval='@once',
tags=['example'],
) as dag:
# [START howto_operator_leveldb_get_key]
diff --git a/airflow/providers/google/marketing_platform/example_dags/example_analytics.py b/airflow/providers/google/marketing_platform/example_dags/example_analytics.py
index 851132d..8c374de 100644
--- a/airflow/providers/google/marketing_platform/example_dags/example_analytics.py
+++ b/airflow/providers/google/marketing_platform/example_dags/example_analytics.py
@@ -40,7 +40,7 @@ DATA_ID = "kjdDu3_tQa6n8Q1kXFtSmg"
with models.DAG(
"example_google_analytics",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag:
# [START howto_marketing_platform_list_accounts_operator]
diff --git a/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py b/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py
index bc75e74..3965aa5 100644
--- a/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py
+++ b/airflow/providers/google/marketing_platform/example_dags/example_campaign_manager.py
@@ -86,7 +86,7 @@ CONVERSION_UPDATE = {
with models.DAG(
"example_campaign_manager",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag:
# [START howto_campaign_manager_insert_report_operator]
diff --git a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
index 5c74c51..8f24f08 100644
--- a/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
+++ b/airflow/providers/google/marketing_platform/example_dags/example_display_video.py
@@ -84,7 +84,7 @@ DOWNLOAD_LINE_ITEMS_REQUEST: Dict = {"filterType": ADVERTISER_ID, "format": "CSV
with models.DAG(
"example_display_video",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag1:
# [START howto_google_display_video_createquery_report_operator]
@@ -126,7 +126,7 @@ with models.DAG(
with models.DAG(
"example_display_video_misc",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag2:
# [START howto_google_display_video_upload_multiple_entity_read_files_to_big_query]
@@ -159,7 +159,7 @@ with models.DAG(
with models.DAG(
"example_display_video_sdf",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag3:
# [START howto_google_display_video_create_sdf_download_task_operator]
diff --git a/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py b/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py
index f2e0b39..bedd0a4 100644
--- a/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py
+++ b/airflow/providers/google/marketing_platform/example_dags/example_search_ads.py
@@ -46,7 +46,7 @@ REPORT = {
with models.DAG(
"example_search_ads",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=dates.days_ago(1),
) as dag:
# [START howto_search_ads_generate_report_operator]
diff --git a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
index 1b5eefd..d83e3b5 100644
--- a/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
+++ b/airflow/providers/google/suite/example_dags/example_gcs_to_sheets.py
@@ -30,7 +30,7 @@ NEW_SPREADSHEET_ID = os.environ.get("NEW_SPREADSHEET_ID", "1234567890qwerty")
with models.DAG(
"example_gcs_to_sheets",
start_date=days_ago(1),
- schedule_interval=None, # Override to match your needs
+ schedule_interval='@once', # Override to match your needs
tags=["example"],
) as dag:
diff --git a/airflow/providers/google/suite/example_dags/example_sheets.py b/airflow/providers/google/suite/example_dags/example_sheets.py
index f3e5067..16e75ce 100644
--- a/airflow/providers/google/suite/example_dags/example_sheets.py
+++ b/airflow/providers/google/suite/example_dags/example_sheets.py
@@ -36,7 +36,7 @@ SPREADSHEET = {
with models.DAG(
"example_sheets_gcs",
- schedule_interval=None, # Override to match your needs,
+ schedule_interval='@once', # Override to match your needs,
start_date=days_ago(1),
tags=["example"],
) as dag:
diff --git a/tests/providers/google/cloud/hooks/test_bigquery_system.py b/tests/providers/google/cloud/hooks/test_bigquery_system.py
index e1bc4d4..e203055 100644
--- a/tests/providers/google/cloud/hooks/test_bigquery_system.py
+++ b/tests/providers/google/cloud/hooks/test_bigquery_system.py
@@ -28,6 +28,7 @@ from tests.test_utils.gcp_system_helpers import GoogleSystemTest
@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
class BigQueryDataframeResultsSystemTest(GoogleSystemTest):
def setUp(self):
+ super().setUp()
self.instance = hook.BigQueryHook()
def test_output_is_dataframe_with_valid_query(self):
@@ -53,3 +54,6 @@ class BigQueryDataframeResultsSystemTest(GoogleSystemTest):
with pytest.raises(Exception) as ctx:
self.instance.get_pandas_df('select * except(b) from (select 1 a, 2 b)', dialect='legacy')
assert 'Reason: ' in str(ctx.value), ""
+
+ def tearDown(self):
+ super().tearDown()
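
The test-side edits repeat one pattern: overriding setUp() and tearDown() now
chain to the GoogleSystemTest hooks, so the base class logic still runs in
subclasses that add their own fixtures. A minimal sketch of the pattern (the
base hook bodies are not shown in this diff; the comment about logs is an
assumption based on the commit subject):

    from tests.test_utils.gcp_system_helpers import GoogleSystemTest

    class MySystemTest(GoogleSystemTest):
        def setUp(self):
            super().setUp()  # run the base per-test setup first
            # test-specific fixtures go here

        def tearDown(self):
            # test-specific cleanup goes here
            super().tearDown()  # assumption: base tearDown surfaces the DAG logs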
diff --git a/tests/providers/google/cloud/hooks/test_kms_system.py b/tests/providers/google/cloud/hooks/test_kms_system.py
index 6963430..24d35e8 100644
--- a/tests/providers/google/cloud/hooks/test_kms_system.py
+++ b/tests/providers/google/cloud/hooks/test_kms_system.py
@@ -34,6 +34,9 @@ GCP_KMS_KEY_NAME = os.environ.get('GCP_KMS_KEY_NAME', 'test-airflow-system-tests
@pytest.mark.credential_file(GCP_KMS_KEY)
class TestKmsHook(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_KMS_KEY)
def test_encrypt(self):
with TemporaryDirectory() as tmp_dir:
@@ -102,3 +105,6 @@ class TestKmsHook(GoogleSystemTest):
ciphertext=encrypted_secret,
)
assert content == b"TEST-SECRET"
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/hooks/test_secret_manager_system.py b/tests/providers/google/cloud/hooks/test_secret_manager_system.py
index 9c7e859..7449645 100644
--- a/tests/providers/google/cloud/hooks/test_secret_manager_system.py
+++ b/tests/providers/google/cloud/hooks/test_secret_manager_system.py
@@ -49,6 +49,9 @@ def helper_two_versions():
@pytest.mark.system("google.secret_manager")
@pytest.mark.credential_file(GCP_SECRET_MANAGER_KEY)
class TestSystemSecretsManager(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@pytest.mark.usefixtures("helper_one_version")
@provide_gcp_context(GCP_SECRET_MANAGER_KEY)
def test_read_secret_from_secret_manager(self):
@@ -73,3 +76,6 @@ class TestSystemSecretsManager(GoogleSystemTest):
assert TEST_SECRET_VALUE == secret
secret = hook.get_secret(secret_id=TEST_SECRET_ID, secret_version='2')
assert TEST_SECRET_VALUE_UPDATED == secret
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py b/tests/providers/google/cloud/log/test_gcs_task_handler_system.py
index 4bc2a62..c5fdc03 100644
--- a/tests/providers/google/cloud/log/test_gcs_task_handler_system.py
+++ b/tests/providers/google/cloud/log/test_gcs_task_handler_system.py
@@ -52,6 +52,7 @@ class TestGCSTaskHandlerSystemTest(GoogleSystemTest):
cls.delete_gcs_bucket(cls.bucket_name) # type: ignore
def setUp(self) -> None:
+ super().setUp()
clear_db_runs()
def tearDown(self) -> None:
@@ -60,6 +61,7 @@ class TestGCSTaskHandlerSystemTest(GoogleSystemTest):
importlib.reload(airflow_local_settings)
settings.configure_logging()
clear_db_runs()
+ super().tearDown()
@provide_session
def test_should_read_logs(self, session):
diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
index c4827da..fcaed0e 100644
--- a/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
+++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler_system.py
@@ -18,7 +18,6 @@ import importlib
import random
import string
import subprocess
-import unittest
from unittest import mock
import pytest
@@ -31,13 +30,18 @@ from airflow.utils.session import provide_session
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_STACKDRIVER
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_runs
-from tests.test_utils.gcp_system_helpers import provide_gcp_context, resolve_full_gcp_key_path
+from tests.test_utils.gcp_system_helpers import (
+ GoogleSystemTest,
+ provide_gcp_context,
+ resolve_full_gcp_key_path,
+)
@pytest.mark.system("google")
@pytest.mark.credential_file(GCP_STACKDRIVER)
-class TestStackdriverLoggingHandlerSystemTest(unittest.TestCase):
+class TestStackdriverLoggingHandlerSystemTest(GoogleSystemTest):
def setUp(self) -> None:
+ super().setUp()
clear_db_runs()
        self.log_name = 'stackdriver-tests-'.join(random.sample(string.ascii_lowercase, 16))
@@ -47,6 +51,7 @@ class TestStackdriverLoggingHandlerSystemTest(unittest.TestCase):
importlib.reload(airflow_local_settings)
settings.configure_logging()
clear_db_runs()
+ super().tearDown()
@provide_session
def test_should_support_key_auth(self, session):
diff --git a/tests/providers/google/cloud/operators/test_automl_system.py b/tests/providers/google/cloud/operators/test_automl_system.py
index cc3e487..911296a 100644
--- a/tests/providers/google/cloud/operators/test_automl_system.py
+++ b/tests/providers/google/cloud/operators/test_automl_system.py
@@ -25,15 +25,27 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.credential_file(GCP_AUTOML_KEY)
@pytest.mark.long_running
class AutoMLDatasetOperationsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_AUTOML_KEY)
def test_run_example_dag(self):
self.run_dag('example_automl_dataset', CLOUD_DAG_FOLDER)
+ def tearDown(self):
+ super().tearDown()
+
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_AUTOML_KEY)
@pytest.mark.long_running
class AutoMLModelOperationsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_AUTOML_KEY)
def test_run_example_dag(self):
self.run_dag('example_create_and_deploy', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_bigtable_system.py b/tests/providers/google/cloud/operators/test_bigtable_system.py
index ea83493..bb4e348 100644
--- a/tests/providers/google/cloud/operators/test_bigtable_system.py
+++ b/tests/providers/google/cloud/operators/test_bigtable_system.py
@@ -26,6 +26,9 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_BIGTABLE_KEY)
class BigTableExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_BIGTABLE_KEY)
def test_run_example_dag_gcs_bigtable(self):
self.run_dag('example_gcp_bigtable_operators', CLOUD_DAG_FOLDER)
diff --git a/tests/providers/google/cloud/operators/test_cloud_sql_system.py b/tests/providers/google/cloud/operators/test_cloud_sql_system.py
index 0d6e274..3639426 100644
--- a/tests/providers/google/cloud/operators/test_cloud_sql_system.py
+++ b/tests/providers/google/cloud/operators/test_cloud_sql_system.py
@@ -41,6 +41,9 @@ SQL_QUERY_TEST_HELPER = CloudSqlQueryTestHelper()
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_CLOUDSQL_KEY)
class CloudSqlExampleDagsIntegrationTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_CLOUDSQL_KEY)
def tearDown(self):
if os.path.exists(TEARDOWN_LOCK_FILE):
@@ -104,6 +107,12 @@ class CloudSqlProxySystemTest(GoogleSystemTest):
gcp_authenticator.gcp_authenticate()
helper.delete_instances(instance_suffix=QUERY_SUFFIX)
+ def setUp(self):
+ super().setUp()
+
+ def tearDown(self):
+ super().tearDown()
+
@staticmethod
def generate_unique_path():
        return ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
diff --git a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service_system.py b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service_system.py
index 5e4a895..0525fd0 100644
--- a/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service_system.py
+++ b/tests/providers/google/cloud/operators/test_cloud_storage_transfer_service_system.py
@@ -50,7 +50,13 @@ def helper():
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_GCS_TRANSFER_KEY)
class GcpTransferExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@pytest.mark.usefixtures("helper")
@provide_gcp_context(GCP_GCS_TRANSFER_KEY)
def test_run_example_dag_compute(self):
self.run_dag('example_gcp_transfer', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_datacatalog_system.py b/tests/providers/google/cloud/operators/test_datacatalog_system.py
index 724faf8..b81ee73 100644
--- a/tests/providers/google/cloud/operators/test_datacatalog_system.py
+++ b/tests/providers/google/cloud/operators/test_datacatalog_system.py
@@ -23,6 +23,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.credential_file(GCP_DATACATALOG_KEY)
class CloudDataflowExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_DATACATALOG_KEY)
def test_run_example_gcp_dataflow_native_java(self):
self.run_dag('example_gcp_datacatalog', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_dataflow_system.py b/tests/providers/google/cloud/operators/test_dataflow_system.py
index 067ecf1..dc6b176 100644
--- a/tests/providers/google/cloud/operators/test_dataflow_system.py
+++ b/tests/providers/google/cloud/operators/test_dataflow_system.py
@@ -80,6 +80,7 @@ EXAMPLE_FLEX_TEMPLATE_SUBDIR = "dataflow/flex-templates/streaming_beam_sql"
class CloudDataflowExampleDagFlexTemplateJavagSystemTest(GoogleSystemTest):
    @provide_gcp_context(GCP_GCS_TRANSFER_KEY, project_id=GoogleSystemTest._project_id())
def setUp(self) -> None:
+ super().setUp()
# Create a Cloud Storage bucket
self.execute_cmd(["gsutil", "mb",
f"gs://{GCS_FLEX_TEMPLATE_BUCKET_NAME}"])
@@ -249,6 +250,7 @@ class CloudDataflowExampleDagFlexTemplateJavagSystemTest(GoogleSystemTest):
# Delete the Cloud Storage bucket
self.execute_cmd(["gsutil", "rm", "-r",
f"gs://{GCS_FLEX_TEMPLATE_BUCKET_NAME}"])
+ super().tearDown()
@pytest.mark.backend("mysql", "postgres")
@@ -256,6 +258,7 @@ class CloudDataflowExampleDagFlexTemplateJavagSystemTest(GoogleSystemTest):
class CloudDataflowExampleDagSqlSystemTest(GoogleSystemTest):
    @provide_gcp_context(GCP_GCS_TRANSFER_KEY, project_id=GoogleSystemTest._project_id())
def setUp(self) -> None:
+ super().setUp()
# Build image with pipeline
with NamedTemporaryFile(suffix=".csv") as f:
f.write(
@@ -363,3 +366,4 @@ class CloudDataflowExampleDagSqlSystemTest(GoogleSystemTest):
)
# Delete the BigQuery dataset,
self.execute_cmd(["bq", "rm", "-r", "-f", "-d",
f'{self._project_id()}:{BQ_SQL_DATASET}'])
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_datafusion_system.py b/tests/providers/google/cloud/operators/test_datafusion_system.py
index 16ddf35..812fc2e 100644
--- a/tests/providers/google/cloud/operators/test_datafusion_system.py
+++ b/tests/providers/google/cloud/operators/test_datafusion_system.py
@@ -26,12 +26,14 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.credential_file(GCP_DATAFUSION_KEY)
class CloudDataFusionExampleDagsSystemTest(GoogleSystemTest):
def setUp(self) -> None:
+ super().setUp()
self.create_gcs_bucket(name=BUCKET_1)
self.create_gcs_bucket(name=BUCKET_2)
def tearDown(self) -> None:
self.delete_gcs_bucket(name=BUCKET_1)
self.delete_gcs_bucket(name=BUCKET_2)
+ super().tearDown()
@provide_gcp_context(GCP_DATAFUSION_KEY)
def test_run_example_dag_function(self):
diff --git a/tests/providers/google/cloud/operators/test_dataprep_system.py b/tests/providers/google/cloud/operators/test_dataprep_system.py
index 03e19d2..e6cd98c 100644
--- a/tests/providers/google/cloud/operators/test_dataprep_system.py
+++ b/tests/providers/google/cloud/operators/test_dataprep_system.py
@@ -48,6 +48,7 @@ class DataprepExampleDagsTest(GoogleSystemTest):
def tearDown(self):
clear_db_connections()
+ super().tearDown()
def test_run_example_dag(self):
self.run_dag(dag_id="example_dataprep", dag_folder=CLOUD_DAG_FOLDER)
diff --git a/tests/providers/google/cloud/operators/test_dlp_system.py b/tests/providers/google/cloud/operators/test_dlp_system.py
index b43e8e3..1b754fa 100644
--- a/tests/providers/google/cloud/operators/test_dlp_system.py
+++ b/tests/providers/google/cloud/operators/test_dlp_system.py
@@ -40,6 +40,9 @@ def helper():
@pytest.mark.usefixtures("helper")
@pytest.mark.credential_file(GCP_DLP_KEY)
class GcpDLPExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_DLP_KEY)
def test_run_example_dag(self):
self.run_dag('example_gcp_dlp', CLOUD_DAG_FOLDER)
@@ -55,3 +58,6 @@ class GcpDLPExampleDagsSystemTest(GoogleSystemTest):
@provide_gcp_context(GCP_DLP_KEY)
def test_run_example_dlp_deidentify_content(self):
self.run_dag('example_gcp_dlp_deidentify_content', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_functions_system.py b/tests/providers/google/cloud/operators/test_functions_system.py
index b72bc94..b8e31e5 100644
--- a/tests/providers/google/cloud/operators/test_functions_system.py
+++ b/tests/providers/google/cloud/operators/test_functions_system.py
@@ -24,6 +24,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_FUNCTION_KEY)
class GcpFunctionExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_FUNCTION_KEY)
def test_run_example_dag_function(self):
self.run_dag('example_gcp_function', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_kubernetes_engine_system.py b/tests/providers/google/cloud/operators/test_kubernetes_engine_system.py
index 21a677e..5f1cfad 100644
--- a/tests/providers/google/cloud/operators/test_kubernetes_engine_system.py
+++ b/tests/providers/google/cloud/operators/test_kubernetes_engine_system.py
@@ -24,6 +24,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_GKE_KEY)
class KubernetesEngineExampleDagTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_GKE_KEY)
def test_run_example_gcp_gke(self):
self.run_dag('example_gcp_gke', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_natural_language_system.py b/tests/providers/google/cloud/operators/test_natural_language_system.py
index 80e57f7..4bcd05f 100644
--- a/tests/providers/google/cloud/operators/test_natural_language_system.py
+++ b/tests/providers/google/cloud/operators/test_natural_language_system.py
@@ -24,6 +24,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_AI_KEY)
class CloudNaturalLanguageExampleDagsTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_AI_KEY)
def test_run_example_dag(self):
self.run_dag('example_gcp_natural_language', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_pubsub_system.py b/tests/providers/google/cloud/operators/test_pubsub_system.py
index 8594dde..989ae93 100644
--- a/tests/providers/google/cloud/operators/test_pubsub_system.py
+++ b/tests/providers/google/cloud/operators/test_pubsub_system.py
@@ -24,6 +24,9 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_PUBSUB_KEY)
class PubSubSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_PUBSUB_KEY)
def test_run_example_sensor_dag(self):
self.run_dag(dag_id="example_gcp_pubsub_sensor",
dag_folder=CLOUD_DAG_FOLDER)
@@ -31,3 +34,6 @@ class PubSubSystemTest(GoogleSystemTest):
@provide_gcp_context(GCP_PUBSUB_KEY)
def test_run_example_operator_dag(self):
self.run_dag(dag_id="example_gcp_pubsub_operator",
dag_folder=CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_spanner_system.py b/tests/providers/google/cloud/operators/test_spanner_system.py
index 460ff91..3188963 100644
--- a/tests/providers/google/cloud/operators/test_spanner_system.py
+++ b/tests/providers/google/cloud/operators/test_spanner_system.py
@@ -29,6 +29,9 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_SPANNER_KEY)
class CloudSpannerExampleDagsTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_SPANNER_KEY)
def tearDown(self):
self.execute_with_ctx(
diff --git a/tests/providers/google/cloud/operators/test_stackdriver_system.py b/tests/providers/google/cloud/operators/test_stackdriver_system.py
index 1d76603..49311aa 100644
--- a/tests/providers/google/cloud/operators/test_stackdriver_system.py
+++ b/tests/providers/google/cloud/operators/test_stackdriver_system.py
@@ -25,6 +25,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_STACKDRIVER)
class GCPTextToSpeechExampleDagSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_STACKDRIVER)
def test_run_example_dag(self):
self.run_dag("example_stackdriver", CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_tasks_system.py b/tests/providers/google/cloud/operators/test_tasks_system.py
index 6d6237f..8d92ff7 100644
--- a/tests/providers/google/cloud/operators/test_tasks_system.py
+++ b/tests/providers/google/cloud/operators/test_tasks_system.py
@@ -23,6 +23,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.credential_file(GCP_TASKS_KEY)
class GcpTasksExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_TASKS_KEY)
def test_run_example_dag_function(self):
self.run_dag('example_gcp_tasks', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_translate_system.py b/tests/providers/google/cloud/operators/test_translate_system.py
index 03f71db..1ffe067 100644
--- a/tests/providers/google/cloud/operators/test_translate_system.py
+++ b/tests/providers/google/cloud/operators/test_translate_system.py
@@ -24,6 +24,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_AI_KEY)
class CloudTranslateExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_AI_KEY)
def test_run_example_dag_function(self):
self.run_dag('example_gcp_translate', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/operators/test_video_intelligence_system.py b/tests/providers/google/cloud/operators/test_video_intelligence_system.py
index f729675..8c92bf1 100644
--- a/tests/providers/google/cloud/operators/test_video_intelligence_system.py
+++ b/tests/providers/google/cloud/operators/test_video_intelligence_system.py
@@ -32,12 +32,12 @@ GCP_VIDEO_SOURCE_URL = "https://www.sample-videos.com/video123/mp4/720/big_buck_
class CloudVideoIntelligenceExampleDagsTest(GoogleSystemTest):
@provide_gcp_context(GCP_AI_KEY)
def setUp(self):
+ super().setUp()
self.create_gcs_bucket(GCP_BUCKET_NAME, location="europe-north1")
self.execute_with_ctx(
cmd=["bash", "-c", f"curl {GCP_VIDEO_SOURCE_URL} | gsutil cp -
gs://{GCP_BUCKET_NAME}/video.mp4"],
key=GCP_GCS_KEY,
)
- super().setUp()
@provide_gcp_context(GCP_AI_KEY)
def tearDown(self):
diff --git a/tests/providers/google/cloud/operators/test_workflows_system.py b/tests/providers/google/cloud/operators/test_workflows_system.py
index 0a768ed..1c28e88 100644
--- a/tests/providers/google/cloud/operators/test_workflows_system.py
+++ b/tests/providers/google/cloud/operators/test_workflows_system.py
@@ -24,6 +24,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.system("google.cloud")
@pytest.mark.credential_file(GCP_WORKFLOWS_KEY)
class CloudVisionExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_WORKFLOWS_KEY)
def test_run_example_workflow_dag(self):
self.run_dag('example_cloud_workflows', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/secrets/test_secret_manager_system.py b/tests/providers/google/cloud/secrets/test_secret_manager_system.py
index a765202..c3d0f26 100644
--- a/tests/providers/google/cloud/secrets/test_secret_manager_system.py
+++ b/tests/providers/google/cloud/secrets/test_secret_manager_system.py
@@ -31,6 +31,7 @@ BACKEND_IMPORT_PATH = "airflow.providers.google.cloud.secrets.secret_manager.Clo
@pytest.mark.credential_file(GCP_SECRET_MANAGER_KEY)
class CloudSecretManagerBackendVariableSystemTest(GoogleSystemTest):
def setUp(self) -> None:
+ super().setUp()
self.unique_suffix = "".join(random.choices(string.ascii_lowercase,
k=10))
self.name = f"airflow-system-test-{self.unique_suffix}"
self.secret_name = f"airflow-variables-{self.name}"
@@ -47,11 +48,13 @@ class CloudSecretManagerBackendVariableSystemTest(GoogleSystemTest):
    @provide_gcp_context(GCP_SECRET_MANAGER_KEY, project_id=GoogleSystemTest._project_id())
def tearDown(self) -> None:
subprocess.run(["gcloud", "secrets", "delete", self.secret_name,
"--quiet"], check=False)
+ super().tearDown()
@pytest.mark.credential_file(GCP_SECRET_MANAGER_KEY)
class CloudSecretManagerBackendConnectionSystemTest(GoogleSystemTest):
def setUp(self) -> None:
+ super().setUp()
self.unique_suffix = "".join(random.choices(string.ascii_lowercase,
k=10))
self.name = f"airflow-system-test-{self.unique_suffix}"
self.secret_name = f"airflow-connections-{self.name}"
@@ -69,3 +72,4 @@ class CloudSecretManagerBackendConnectionSystemTest(GoogleSystemTest):
    @provide_gcp_context(GCP_SECRET_MANAGER_KEY, project_id=GoogleSystemTest._project_id())
def tearDown(self) -> None:
subprocess.run(["gcloud", "secrets", "delete", self.secret_name,
"--quiet"], check=False)
+ super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py
index 7b02938..8253063 100644
--- a/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py
+++ b/tests/providers/google/cloud/transfers/test_azure_fileshare_to_gcs_system.py
@@ -73,7 +73,13 @@ def provide_gcs_bucket_basic():
@pytest.mark.credential_file(GCP_GCS_KEY)
@pytest.mark.system("google.cloud")
class AzureFileShareToGCSOperatorExampleDAGsTest(GoogleSystemTest, AzureSystemTest):
+ def setUp(self):
+ super().setUp()
+
    @pytest.mark.usefixtures('provide_gcs_bucket_basic', 'provide_azure_fileshare_with_directory')
@provide_gcp_context(GCP_GCS_KEY)
def test_run_example_dag_azure_fileshare_to_gcs(self):
self.run_dag('azure_fileshare_to_gcs_example', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py
index aced64a..845c336 100644
--- a/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py
+++ b/tests/providers/google/cloud/transfers/test_bigquery_to_bigquery_system.py
@@ -26,6 +26,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.system("google.cloud")
@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
class BigQueryToBigQueryExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_BIGQUERY_KEY)
def test_run_example_dag_queries(self):
self.run_dag('example_bigquery_to_bigquery', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
index a912629..304a30a 100644
--- a/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
+++ b/tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py
@@ -62,7 +62,13 @@ def provide_facebook_connection(key_file_path: str):
@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
@pytest.mark.system("google.cloud")
class FacebookAdsToGcsExampleDagsSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_BIGQUERY_KEY)
@provide_facebook_connection(FACEBOOK_CREDENTIALS_PATH)
def test_dag_example(self):
self.run_dag("example_facebook_ads_to_gcs", CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py
index 13a75bf..5971e94 100644
--- a/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py
+++ b/tests/providers/google/cloud/transfers/test_gcs_to_bigquery_system.py
@@ -25,6 +25,12 @@ from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTe
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_BIGQUERY_KEY)
class TestGoogleCloudStorageToBigQueryExample(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_BIGQUERY_KEY)
def test_run_example_dag_gcs_to_bigquery_operator(self):
self.run_dag('example_gcs_to_bigquery_operator', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py
index d5e45b6..86c233b 100644
--- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py
+++ b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs_system.py
@@ -34,7 +34,13 @@ SALESFORCE_CREDENTIALS_PATH = os.path.join(CREDENTIALS_DIR, SALESFORCE_KEY)
@pytest.mark.system("google.cloud")
@pytest.mark.system("salesforce")
class TestSalesforceIntoGCSExample(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_gcp_context(GCP_BIGQUERY_KEY)
@provide_salesforce_connection(SALESFORCE_CREDENTIALS_PATH)
def test_run_example_dag_salesforce_to_gcs_operator(self):
self.run_dag('example_salesforce_to_gcs', CLOUD_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/leveldb/operators/test_leveldb_system.py b/tests/providers/google/leveldb/operators/test_leveldb_system.py
index ce355f4..4eebccb 100644
--- a/tests/providers/google/leveldb/operators/test_leveldb_system.py
+++ b/tests/providers/google/leveldb/operators/test_leveldb_system.py
@@ -37,6 +37,12 @@ def provide_leveldb_connection():
@pytest.mark.system("google.leveldb")
class LevelDBSystemTest(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@provide_leveldb_connection()
def test_run_example_dag(self):
self.run_dag('example_leveldb', LEVELDB_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/marketing_platform/operators/test_analytics_system.py b/tests/providers/google/marketing_platform/operators/test_analytics_system.py
index b0e9b19..ca09314 100644
--- a/tests/providers/google/marketing_platform/operators/test_analytics_system.py
+++ b/tests/providers/google/marketing_platform/operators/test_analytics_system.py
@@ -48,7 +48,13 @@ def helper():
@pytest.mark.system("google.marketing_platform")
@pytest.mark.credential_file(GMP_KEY)
class TestSystemGoogleAds(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@pytest.mark.usefixtures("helper")
@provide_gcp_context(GMP_KEY, scopes=SCOPES)
def test_run_example_dag(self):
self.run_dag('example_google_analytics', MARKETING_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
diff --git a/tests/providers/google/suite/operators/test_sheets_system.py b/tests/providers/google/suite/operators/test_sheets_system.py
index b6705b8..b8b341b 100644
--- a/tests/providers/google/suite/operators/test_sheets_system.py
+++ b/tests/providers/google/suite/operators/test_sheets_system.py
@@ -38,7 +38,13 @@ def helper():
@pytest.mark.system("google.suite")
@pytest.mark.credential_file(GCP_GCS_KEY)
class TestSystemSheetsToGcs(GoogleSystemTest):
+ def setUp(self):
+ super().setUp()
+
@pytest.mark.usefixtures("helper")
@provide_gcp_context(GCP_GCS_KEY, scopes=SCOPES)
def test_run_example_dag(self):
self.run_dag('example_sheets_gcs', GSUITE_DAG_FOLDER)
+
+ def tearDown(self):
+ super().tearDown()
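Every test in this patch drives an example DAG through self.run_dag(...). As a rough sketch of what such a helper has to do (an illustration under assumptions, not the actual implementation in tests/test_utils/gcp_system_helpers.py): load the DAG from the given folder, clear leftover state, and execute it backfill-style so that, with schedule_interval='@once' and start_date=days_ago(1), exactly one run is produced:

    from airflow.models import DagBag

    def run_dag(dag_id: str, dag_folder: str) -> None:
        # Hypothetical stand-in for SystemTest.run_dag.
        dag_bag = DagBag(dag_folder=dag_folder, include_examples=False)
        dag = dag_bag.get_dag(dag_id)
        assert dag is not None, f"DAG {dag_id} not found in {dag_folder}"
        dag.clear()  # reset any state left by earlier runs
        dag.run(ignore_first_depends_on_past=True, verbose=True)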