This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new 550d1a5059 Fix delete_cluster no use TriggerRule.ALL_DONE (#24213)
550d1a5059 is described below

commit 550d1a5059c10b84dc40d7a66c203cc6514e6c63
Author: Chenglong Yan <[email protected]>
AuthorDate: Mon Jun 6 22:42:36 2022 +0800

    Fix delete_cluster no use TriggerRule.ALL_DONE (#24213)
    
    related: #24082
---
 .../system/providers/yandex/example_yandexcloud_dataproc.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/tests/system/providers/yandex/example_yandexcloud_dataproc.py b/tests/system/providers/yandex/example_yandexcloud_dataproc.py
index 29636d363f..e8c2998adb 100644
--- a/tests/system/providers/yandex/example_yandexcloud_dataproc.py
+++ b/tests/system/providers/yandex/example_yandexcloud_dataproc.py
@@ -28,9 +28,12 @@ from airflow.providers.yandex.operators.yandexcloud_dataproc import (
     DataprocDeleteClusterOperator,
 )
 
+# Name of the datacenter where Dataproc cluster will be created
+from airflow.utils.trigger_rule import TriggerRule
+
 # should be filled with appropriate ids
 
-# Name of the datacenter where Dataproc cluster will be created
+
 AVAILABILITY_ZONE_ID = 'ru-central1-c'
 
 # Dataproc cluster jobs will produce logs in specified s3 bucket
@@ -151,12 +154,18 @@ with DAG(
     )
 
     delete_cluster = DataprocDeleteClusterOperator(
-        task_id='delete_cluster',
+        task_id='delete_cluster', trigger_rule=TriggerRule.ALL_DONE
     )
 
     create_cluster >> create_mapreduce_job >> create_hive_query >> create_hive_query_from_file
     create_hive_query_from_file >> create_spark_job >> create_pyspark_job >> delete_cluster
 
+    from tests.system.utils.watcher import watcher
+
+    # This test needs watcher in order to properly mark success/failure
+    # when "teardown" task with trigger rule is part of the DAG
+    list(dag.tasks) >> watcher()
+
 from tests.system.utils import get_test_run  # noqa: E402
 
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)

Reply via email to