This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 6c854dcb16 Migrating Google AutoML example_dags to sys tests (#32368)
6c854dcb16 is described below

commit 6c854dcb164caffea640b368eda94a2a9166fa97
Author: Amogh Desai <[email protected]>
AuthorDate: Fri Jul 7 21:44:55 2023 +0530

    Migrating Google AutoML example_dags to sys tests (#32368)
    
    
    
    ---------
    
    Co-authored-by: Amogh Desai <[email protected]>
---
 .../example_automl_nl_text_classification.py       | 23 +++++++++++++++-------
 .../automl}/example_automl_nl_text_sentiment.py    | 20 ++++++++++++++-----
 .../cloud/automl}/example_automl_translation.py    | 22 ++++++++++++++++-----
 ...ple_automl_video_intelligence_classification.py | 20 ++++++++++++++-----
 .../example_automl_video_intelligence_tracking.py  | 19 ++++++++++++++----
 .../example_automl_vision_object_detection.py      | 20 ++++++++++++++-----
 6 files changed, 93 insertions(+), 31 deletions(-)

diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py
 
b/tests/system/providers/google/cloud/automl/example_automl_nl_text_classification.py
similarity index 87%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_nl_text_classification.py
index afe28a9ff8..3912916209 100644
--- 
a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_classification.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_nl_text_classification.py
@@ -39,13 +39,9 @@ GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "your-project-id")
 GCP_AUTOML_LOCATION = os.environ.get("GCP_AUTOML_LOCATION", "us-central1")
 GCP_AUTOML_TEXT_CLS_BUCKET = os.environ.get("GCP_AUTOML_TEXT_CLS_BUCKET", "gs://INVALID BUCKET NAME")
 
-# Example values
-DATASET_ID = ""
-
 # Example model
 MODEL = {
     "display_name": "auto_model_1",
-    "dataset_id": DATASET_ID,
     "text_classification_model_metadata": {},
 }
 
@@ -55,6 +51,7 @@ DATASET = {
     "text_classification_dataset_metadata": {"classification_type": "MULTICLASS"},
 }
 
+
 IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_TEXT_CLS_BUCKET]}}
 
 extract_object_id = CloudAutoMLHook.extract_object_id
@@ -65,12 +62,13 @@ with models.DAG(
     start_date=datetime(2021, 1, 1),
     catchup=False,
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, location=GCP_AUTOML_LOCATION
     )
 
     dataset_id = cast(str, XComArg(create_dataset_task, key="dataset_id"))
+    MODEL["dataset_id"] = dataset_id
 
     import_dataset_task = AutoMLImportDataOperator(
         task_id="import_dataset_task",
@@ -78,11 +76,9 @@ with models.DAG(
         location=GCP_AUTOML_LOCATION,
         input_config=IMPORT_INPUT_CONFIG,
     )
-
     MODEL["dataset_id"] = dataset_id
 
     create_model = AutoMLTrainModelOperator(task_id="create_model", model=MODEL, location=GCP_AUTOML_LOCATION)
-
     model_id = cast(str, XComArg(create_model, key="model_id"))
 
     delete_model_task = AutoMLDeleteModelOperator(
@@ -99,10 +95,23 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
     #   create_dataset_task >> import_dataset_task
     #   create_dataset_task >> create_model
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py
 
b/tests/system/providers/google/cloud/automl/example_automl_nl_text_sentiment.py
similarity index 88%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_nl_text_sentiment.py
index a823b8af89..6bbe0d2bfb 100644
--- 
a/airflow/providers/google/cloud/example_dags/example_automl_nl_text_sentiment.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_nl_text_sentiment.py
@@ -39,13 +39,9 @@ GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", 
"your-project-id")
 GCP_AUTOML_LOCATION = os.environ.get("GCP_AUTOML_LOCATION", "us-central1")
 GCP_AUTOML_SENTIMENT_BUCKET = os.environ.get("GCP_AUTOML_SENTIMENT_BUCKET", "gs://INVALID BUCKET NAME")
 
-# Example values
-DATASET_ID = ""
-
 # Example model
 MODEL = {
     "display_name": "auto_model_1",
-    "dataset_id": DATASET_ID,
     "text_sentiment_model_metadata": {},
 }
 
@@ -66,12 +62,13 @@ with models.DAG(
     catchup=False,
     user_defined_macros={"extract_object_id": extract_object_id},
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, 
location=GCP_AUTOML_LOCATION
     )
 
     dataset_id = cast(str, XComArg(create_dataset_task, key="dataset_id"))
+    MODEL["dataset_id"] = dataset_id
 
     import_dataset_task = AutoMLImportDataOperator(
         task_id="import_dataset_task",
@@ -100,7 +97,9 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
@@ -108,3 +107,14 @@ with models.DAG(
     #   create_dataset_task >> create_model
     #   create_model >> delete_model_task
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_translation.py 
b/tests/system/providers/google/cloud/automl/example_automl_translation.py
similarity index 88%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_translation.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_translation.py
index 2bef20caab..87bf7166dc 100644
--- a/airflow/providers/google/cloud/example_dags/example_automl_translation.py
+++ b/tests/system/providers/google/cloud/automl/example_automl_translation.py
@@ -41,13 +41,9 @@ GCP_AUTOML_TRANSLATION_BUCKET = os.environ.get(
     "GCP_AUTOML_TRANSLATION_BUCKET", "gs://INVALID BUCKET NAME/file"
 )
 
-# Example values
-DATASET_ID = "TRL123456789"
-
 # Example model
 MODEL = {
     "display_name": "auto_model_1",
-    "dataset_id": DATASET_ID,
     "translation_model_metadata": {},
 }
 
@@ -60,6 +56,7 @@ DATASET = {
     },
 }
 
+
 IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": 
[GCP_AUTOML_TRANSLATION_BUCKET]}}
 
 extract_object_id = CloudAutoMLHook.extract_object_id
@@ -69,10 +66,11 @@ extract_object_id = CloudAutoMLHook.extract_object_id
 with models.DAG(
     "example_automl_translation",
     start_date=datetime(2021, 1, 1),
+    schedule="@once",
     catchup=False,
     user_defined_macros={"extract_object_id": extract_object_id},
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, 
location=GCP_AUTOML_LOCATION
     )
@@ -106,7 +104,9 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
@@ -114,3 +114,15 @@ with models.DAG(
     #   create_dataset_task >> create_model
     #   create_model >> delete_model_task
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py
 
b/tests/system/providers/google/cloud/automl/example_automl_video_intelligence_classification.py
similarity index 88%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_video_intelligence_classification.py
index 2b55c42a8a..63447a1230 100644
--- 
a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_classification.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_video_intelligence_classification.py
@@ -41,13 +41,9 @@ GCP_AUTOML_VIDEO_BUCKET = os.environ.get(
     "GCP_AUTOML_VIDEO_BUCKET", "gs://INVALID BUCKET NAME/hmdb_split1.csv"
 )
 
-# Example values
-DATASET_ID = "VCN123455678"
-
 # Example model
 MODEL = {
     "display_name": "auto_model_1",
-    "dataset_id": DATASET_ID,
     "video_classification_model_metadata": {},
 }
 
@@ -69,12 +65,13 @@ with models.DAG(
     catchup=False,
     user_defined_macros={"extract_object_id": extract_object_id},
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, 
location=GCP_AUTOML_LOCATION
     )
 
     dataset_id = cast(str, XComArg(create_dataset_task, key="dataset_id"))
+    MODEL["dataset_id"] = dataset_id
 
     import_dataset_task = AutoMLImportDataOperator(
         task_id="import_dataset_task",
@@ -103,7 +100,9 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
@@ -111,3 +110,14 @@ with models.DAG(
     #   create_dataset_task >> create_model
     #   create_model >> delete_model_task
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py
 
b/tests/system/providers/google/cloud/automl/example_automl_video_intelligence_tracking.py
similarity index 88%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_video_intelligence_tracking.py
index daed5748b2..5ac8fa7457 100644
--- 
a/airflow/providers/google/cloud/example_dags/example_automl_video_intelligence_tracking.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_video_intelligence_tracking.py
@@ -42,13 +42,10 @@ GCP_AUTOML_TRACKING_BUCKET = os.environ.get(
     "gs://INVALID BUCKET NAME/youtube_8m_videos_animal_tiny.csv",
 )
 
-# Example values
-DATASET_ID = "VOT123456789"
 
 # Example model
 MODEL = {
     "display_name": "auto_model_1",
-    "dataset_id": DATASET_ID,
     "video_object_tracking_model_metadata": {},
 }
 
@@ -70,12 +67,13 @@ with models.DAG(
     catchup=False,
     user_defined_macros={"extract_object_id": extract_object_id},
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, 
location=GCP_AUTOML_LOCATION
     )
 
     dataset_id = cast(str, XComArg(create_dataset_task, key="dataset_id"))
+    MODEL["dataset_id"] = dataset_id
 
     import_dataset_task = AutoMLImportDataOperator(
         task_id="import_dataset_task",
@@ -104,7 +102,9 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
@@ -112,3 +112,14 @@ with models.DAG(
     #   create_dataset_task >> create_model
     #   create_model >> delete_model_task
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)
diff --git 
a/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py
 
b/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py
similarity index 88%
rename from 
airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py
rename to 
tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py
index 8b9ae271d1..933c2760cb 100644
--- 
a/airflow/providers/google/cloud/example_dags/example_automl_vision_object_detection.py
+++ 
b/tests/system/providers/google/cloud/automl/example_automl_vision_object_detection.py
@@ -41,13 +41,9 @@ GCP_AUTOML_DETECTION_BUCKET = os.environ.get(
     "GCP_AUTOML_DETECTION_BUCKET", "gs://INVALID BUCKET NAME/img/openimage/csv/salads_ml_use.csv"
 )
 
-# Example values
-DATASET_ID = ""
-
 # Example model
 MODEL = {
     "display_name": "auto_model",
-    "dataset_id": DATASET_ID,
     "image_object_detection_model_metadata": {},
 }
 
@@ -69,12 +65,13 @@ with models.DAG(
     catchup=False,
     user_defined_macros={"extract_object_id": extract_object_id},
     tags=["example"],
-) as example_dag:
+) as dag:
     create_dataset_task = AutoMLCreateDatasetOperator(
         task_id="create_dataset_task", dataset=DATASET, 
location=GCP_AUTOML_LOCATION
     )
 
     dataset_id = cast(str, XComArg(create_dataset_task, key="dataset_id"))
+    MODEL["dataset_id"] = dataset_id
 
     import_dataset_task = AutoMLImportDataOperator(
         task_id="import_dataset_task",
@@ -103,7 +100,9 @@ with models.DAG(
         project_id=GCP_PROJECT_ID,
     )
 
+    # TEST BODY
     import_dataset_task >> create_model
+    # TEST TEARDOWN
     delete_model_task >> delete_datasets_task
 
     # Task dependencies created via `XComArgs`:
@@ -111,3 +110,14 @@ with models.DAG(
     #   create_dataset_task >> create_model
     #   create_model >> delete_model_task
     #   create_dataset_task >> delete_datasets_task
+
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "tearDown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
+from tests.system.utils import get_test_run  # noqa: E402
+
+# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
+test_run = get_test_run(dag)

Reply via email to