This is an automated email from the ASF dual-hosted git repository.

eladkal pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 681b884525 Refactor Compute Engine system tests (#40538)
681b884525 is described below

commit 681b884525da01be0e3768ddced26d826ff5ef2b
Author: VladaZakharova <[email protected]>
AuthorDate: Sun Jul 28 07:35:40 2024 +0200

    Refactor Compute Engine system tests (#40538)
---
 .../google/cloud/compute/example_compute.py        | 54 +++++++++++-----------
 .../google/cloud/compute/example_compute_igm.py    | 40 ++++++++--------
 .../google/cloud/compute/example_compute_ssh.py    | 22 +++++----
 .../cloud/compute/example_compute_ssh_os_login.py  | 24 +++++-----
 .../cloud/compute/example_compute_ssh_parallel.py  | 18 ++++----
 5 files changed, 84 insertions(+), 74 deletions(-)
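
In short, the refactor moves trigger_rule=TriggerRule.ALL_DONE into the teardown operators' constructors (instead of assigning the attribute after construction) and replaces the chain() helper with explicit >> dependencies annotated as TEST SETUP / TEST BODY / TEST TEARDOWN. A minimal sketch of the new style follows; the dag_id, task ids, zone and resource name are illustrative placeholders, not values taken from these DAGs:

from datetime import datetime

from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator
from airflow.providers.google.cloud.operators.compute import ComputeEngineDeleteInstanceOperator
from airflow.utils.trigger_rule import TriggerRule

with DAG(
    "compute_refactor_sketch",  # illustrative dag_id
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
):
    # Stand-in for the setup/body tasks (insert, start, stop, ...).
    setup = EmptyOperator(task_id="setup")

    # New style: trigger_rule is passed to the constructor, where the old code
    # assigned it afterwards (task.trigger_rule = TriggerRule.ALL_DONE).
    delete_instance = ComputeEngineDeleteInstanceOperator(
        task_id="delete_instance",
        zone="europe-west1-b",           # placeholder zone
        resource_id="example-instance",  # placeholder resource name
        trigger_rule=TriggerRule.ALL_DONE,
    )

    # New style: explicit >> dependencies instead of chain(setup, delete_instance).
    (
        # TEST SETUP
        setup
        # TEST TEARDOWN
        >> delete_instance
    )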

diff --git a/tests/system/providers/google/cloud/compute/example_compute.py b/tests/system/providers/google/cloud/compute/example_compute.py
index 055d84bfb9..6be263c9fa 100644
--- a/tests/system/providers/google/cloud/compute/example_compute.py
+++ b/tests/system/providers/google/cloud/compute/example_compute.py
@@ -27,7 +27,6 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
-from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.compute import (
     ComputeEngineDeleteInstanceOperator,
@@ -43,12 +42,12 @@ from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 # [START howto_operator_gce_args_common]
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 DAG_ID = "cloud_compute"
 
-LOCATION = "europe-west1-b"
-REGION = "europe-west1"
+LOCATION = "europe-west2-b"
+REGION = "europe-west2"
 GCE_INSTANCE_NAME = "instance-compute-test"
 SHORT_MACHINE_TYPE_NAME = "n1-standard-1"
 TEMPLATE_NAME = "instance-template"
@@ -104,7 +103,7 @@ with DAG(
     schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example"],
+    tags=["example", "compute"],
 ) as dag:
     # [START howto_operator_gce_insert]
     gce_instance_insert = ComputeEngineInsertInstanceOperator(
@@ -184,9 +183,9 @@ with DAG(
         project_id=PROJECT_ID,
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_stop]
-    gce_instance_stop.trigger_rule = TriggerRule.ALL_DONE
 
     # Duplicate stop for idempotence testing
     # [START howto_operator_gce_stop_no_project_id]
@@ -194,9 +193,9 @@ with DAG(
         task_id="gcp_compute_stop_task_2",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_stop_no_project_id]
-    gce_instance_stop2.trigger_rule = TriggerRule.ALL_DONE
 
     # [START howto_operator_gce_set_machine_type]
     gce_set_machine_type = ComputeEngineSetMachineTypeOperator(
@@ -223,43 +222,46 @@ with DAG(
         task_id="gcp_compute_delete_instance_task",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_no_project_id]
-    gce_instance_delete.trigger_rule = TriggerRule.ALL_DONE
 
     # [START howto_operator_gce_delete_no_project_id]
     gce_instance_delete2 = ComputeEngineDeleteInstanceOperator(
         task_id="gcp_compute_delete_instance_task_2",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_no_project_id]
-    gce_instance_delete2.trigger_rule = TriggerRule.ALL_DONE
 
     # [START howto_operator_gce_delete_new_template_no_project_id]
     gce_instance_template_delete = ComputeEngineDeleteInstanceTemplateOperator(
         task_id="gcp_compute_delete_template_task",
         resource_id=TEMPLATE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_new_template_no_project_id]
-    gce_instance_template_delete.trigger_rule = TriggerRule.ALL_DONE
 
-    chain(
-        gce_instance_insert,
-        gce_instance_insert2,
-        gce_instance_delete,
-        gce_instance_template_insert,
-        gce_instance_template_insert2,
-        gce_instance_insert_from_template,
-        gce_instance_insert_from_template2,
-        gce_instance_start,
-        gce_instance_start2,
-        gce_instance_stop,
-        gce_instance_stop2,
-        gce_set_machine_type,
-        gce_set_machine_type2,
-        gce_instance_delete2,
-        gce_instance_template_delete,
+    (
+        # TEST SETUP
+        gce_instance_insert
+        >> gce_instance_insert2
+        # TEST BODY
+        >> gce_instance_delete
+        >> gce_instance_template_insert
+        >> gce_instance_template_insert2
+        >> gce_instance_insert_from_template
+        >> gce_instance_insert_from_template2
+        >> gce_instance_start
+        >> gce_instance_start2
+        >> gce_instance_stop
+        >> gce_instance_stop2
+        >> gce_set_machine_type
+        >> gce_set_machine_type2
+        # TEST TEARDOWN
+        >> gce_instance_delete2
+        >> gce_instance_template_delete
     )
 
     # ### Everything below this line is not part of example ###
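
For reference, the chain() helper removed above and the >> form now used build the same dependency graph; chain() also accepts lists for fan-out/fan-in, which is how the parallel SSH test further down expresses its two concurrent tasks. A small illustration with placeholder tasks (names assumed, not from this commit):

from datetime import datetime

from airflow.models.dag import DAG
from airflow.operators.empty import EmptyOperator

with DAG("chain_vs_shift_sketch", schedule="@once", start_date=datetime(2021, 1, 1), catchup=False):
    a, b, c, d = (EmptyOperator(task_id=i) for i in "abcd")

    # Equivalent to chain(a, [b, c], d) from airflow.models.baseoperator:
    # a runs first, b and c run in parallel, d runs after both complete.
    a >> [b, c] >> d
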
diff --git a/tests/system/providers/google/cloud/compute/example_compute_igm.py b/tests/system/providers/google/cloud/compute/example_compute_igm.py
index 60bd196a35..04f1e62b91 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_igm.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_igm.py
@@ -28,7 +28,6 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
-from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.operators.compute import (
     ComputeEngineCopyInstanceTemplateOperator,
@@ -41,7 +40,7 @@ from airflow.providers.google.cloud.operators.compute import (
 from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 LOCATION = "europe-west1-b"
@@ -115,7 +114,7 @@ with DAG(
     schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
-    tags=["example"],
+    tags=["example", "compute-igm"],
 ) as dag:
     # [START howto_operator_gce_igm_insert_template]
     gce_instance_template_insert = ComputeEngineInsertInstanceTemplateOperator(
@@ -196,39 +195,42 @@ with DAG(
     gce_instance_template_old_delete = ComputeEngineDeleteInstanceTemplateOperator(
         task_id="gcp_compute_delete_old_template_task",
         resource_id=TEMPLATE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_old_template_no_project_id]
-    gce_instance_template_old_delete.trigger_rule = TriggerRule.ALL_DONE
 
     # [START howto_operator_gce_delete_new_template_no_project_id]
     gce_instance_template_new_delete = ComputeEngineDeleteInstanceTemplateOperator(
         task_id="gcp_compute_delete_new_template_task",
         resource_id=NEW_TEMPLATE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_new_template_no_project_id]
-    gce_instance_template_new_delete.trigger_rule = TriggerRule.ALL_DONE
 
     # [START howto_operator_gce_delete_igm_no_project_id]
     gce_igm_delete = ComputeEngineDeleteInstanceGroupManagerOperator(
         task_id="gcp_compute_delete_group_task",
         resource_id=INSTANCE_GROUP_MANAGER_NAME,
         zone=LOCATION,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_igm_no_project_id]
-    gce_igm_delete.trigger_rule = TriggerRule.ALL_DONE
-
-    chain(
-        gce_instance_template_insert,
-        gce_instance_template_insert2,
-        gce_instance_template_copy,
-        gce_instance_template_copy2,
-        gce_igm_insert,
-        gce_igm_insert2,
-        gce_instance_group_manager_update_template,
-        gce_instance_group_manager_update_template2,
-        gce_igm_delete,
-        gce_instance_template_old_delete,
-        gce_instance_template_new_delete,
+
+    (
+        # TEST SETUP
+        gce_instance_template_insert
+        >> gce_instance_template_insert2
+        >> gce_instance_template_copy
+        >> gce_instance_template_copy2
+        # TEST BODY
+        >> gce_igm_insert
+        >> gce_igm_insert2
+        >> gce_instance_group_manager_update_template
+        >> gce_instance_group_manager_update_template2
+        # TEST TEARDOWN
+        >> gce_igm_delete
+        >> gce_instance_template_old_delete
+        >> gce_instance_template_new_delete
     )
 
     # ### Everything below this line is not part of example ###
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh.py b/tests/system/providers/google/cloud/compute/example_compute_ssh.py
index 7716f19d90..bb5f766655 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_ssh.py
@@ -26,7 +26,6 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
-from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
 from airflow.providers.google.cloud.operators.compute import (
@@ -38,12 +37,12 @@ from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 # [START howto_operator_gce_args_common]
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 DAG_ID = "cloud_compute_ssh"
-LOCATION = "europe-west1-b"
-REGION = "europe-west1"
+LOCATION = "europe-west2-b"
+REGION = "europe-west2"
 GCE_INSTANCE_NAME = "instance-ssh-test"
 SHORT_MACHINE_TYPE_NAME = "n1-standard-1"
 GCE_INSTANCE_BODY = {
@@ -122,15 +121,18 @@ with DAG(
         task_id="gcp_compute_delete_instance_task",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_no_project_id]
-    gce_instance_delete.trigger_rule = TriggerRule.ALL_DONE
 
-    chain(
-        gce_instance_insert,
-        metadata_without_iap_tunnel1,
-        metadata_without_iap_tunnel2,
-        gce_instance_delete,
+    (
+        # TEST SETUP
+        gce_instance_insert
+        # TEST BODY
+        >> metadata_without_iap_tunnel1
+        >> metadata_without_iap_tunnel2
+        # TEST TEARDOWN
+        >> gce_instance_delete
     )
 
     # ### Everything below this line is not part of example ###
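
The metadata_without_iap_tunnel tasks wired up above are defined in an unchanged part of the file, so they do not appear in this diff; presumably they follow the usual ComputeEngineSSHHook + SSHOperator pairing used in these SSH system tests. A hedged sketch of that pairing (user, zone and command are placeholders):

from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
from airflow.providers.ssh.operators.ssh import SSHOperator

metadata_without_iap_tunnel1 = SSHOperator(
    task_id="metadata_without_iap_tunnel1",
    ssh_hook=ComputeEngineSSHHook(
        user="username",                    # placeholder user
        instance_name="instance-ssh-test",  # GCE_INSTANCE_NAME above
        zone="europe-west1-b",              # placeholder zone
        use_oslogin=False,
        use_iap_tunnel=False,
    ),
    command="echo metadata_without_iap_tunnel1",
)
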
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py b/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
index 6e21a429f0..c479d17297 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_ssh_os_login.py
@@ -26,7 +26,6 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
-from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
 from airflow.providers.google.cloud.operators.compute import (
@@ -38,12 +37,12 @@ from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 # [START howto_operator_gce_args_common]
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 DAG_ID = "cloud_compute_ssh_os_login"
-LOCATION = "europe-west1-b"
-REGION = "europe-west1"
+LOCATION = "europe-west2-b"
+REGION = "europe-west2"
 GCE_INSTANCE_NAME = "instance-ssh-test-oslogin"
 SHORT_MACHINE_TYPE_NAME = "n1-standard-1"
 GCE_INSTANCE_BODY = {
@@ -80,7 +79,7 @@ GCE_INSTANCE_BODY = {
 
 with DAG(
     DAG_ID,
-    schedule_interval="@once",
+    schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
     tags=["example", "compute-ssh", "os-login"],
@@ -130,15 +129,18 @@ with DAG(
         task_id="gcp_compute_delete_instance_task",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_no_project_id]
-    gce_instance_delete.trigger_rule = TriggerRule.ALL_DONE
 
-    chain(
-        gce_instance_insert,
-        os_login_task1,
-        os_login_task2,
-        gce_instance_delete,
+    (
+        # TEST SETUP
+        gce_instance_insert
+        # TEST BODY
+        >> os_login_task1
+        >> os_login_task2
+        # TEST TEARDOWN
+        >> gce_instance_delete
     )
 
     # ### Everything below this line is not part of example ###
diff --git a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py b/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
index badb34b50b..3a7b7b1ecd 100644
--- a/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
+++ b/tests/system/providers/google/cloud/compute/example_compute_ssh_parallel.py
@@ -26,7 +26,6 @@ from __future__ import annotations
 import os
 from datetime import datetime
 
-from airflow.models.baseoperator import chain
 from airflow.models.dag import DAG
 from airflow.providers.google.cloud.hooks.compute_ssh import ComputeEngineSSHHook
 from airflow.providers.google.cloud.operators.compute import (
@@ -38,7 +37,7 @@ from airflow.utils.trigger_rule import TriggerRule
 from tests.system.providers.google import DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 # [START howto_operator_gce_args_common]
-ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID", "default")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT") or DEFAULT_GCP_SYSTEM_TEST_PROJECT_ID
 
 DAG_ID = "cloud_compute_ssh_parallel"
@@ -72,7 +71,7 @@ GCE_INSTANCE_BODY = {
 
 with DAG(
     DAG_ID,
-    schedule_interval="@once",
+    schedule="@once",
     start_date=datetime(2021, 1, 1),
     catchup=False,
     tags=["example", "compute-ssh-parallel"],
@@ -124,14 +123,17 @@ with DAG(
         task_id="gcp_compute_delete_instance_task",
         zone=LOCATION,
         resource_id=GCE_INSTANCE_NAME,
+        trigger_rule=TriggerRule.ALL_DONE,
     )
     # [END howto_operator_gce_delete_no_project_id]
-    gce_instance_delete.trigger_rule = TriggerRule.ALL_DONE
 
-    chain(
-        gce_instance_insert,
-        [metadata_without_iap_tunnel, metadata_with_iap_tunnel],
-        gce_instance_delete,
+    (
+        # TEST SETUP
+        gce_instance_insert
+        # TEST BODY
+        >> [metadata_without_iap_tunnel, metadata_with_iap_tunnel]
+        # TEST TEARDOWN
+        >> gce_instance_delete
     )
 
     # ### Everything below this line is not part of example ###
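
The "# ### Everything below this line is not part of example ###" trailer in each of these modules marks the standard system-test footer, which sits outside the hunks shown here and is untouched by this commit. For context, that footer typically looks roughly like the sketch below (dag is the module-level DAG defined above; the helper paths are the usual tests.system utilities):

from tests.system.utils.watcher import watcher

# The watcher task is needed so that a failed task still fails the test run,
# even though the teardown tasks use trigger_rule=TriggerRule.ALL_DONE.
list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see tests/system/README.md).
test_run = get_test_run(dag)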
