ferruzzi commented on code in PR #24239:
URL: https://github.com/apache/airflow/pull/24239#discussion_r890455678


##########
tests/system/providers/amazon/aws/redshift/__init__.py:
##########
@@ -0,0 +1,17 @@
+#

Review Comment:
   So far, the plan has been that all tests should live at the 'aws' level, not in a subdirectory/module, just like everywhere else (aws/operators, aws/hooks, etc.). Is there a particular reason you went this route?



##########
tests/system/providers/amazon/aws/redshift/example_redshift_cluster.py:
##########
@@ -28,7 +28,11 @@
     RedshiftResumeClusterOperator,
 )
 from airflow.providers.amazon.aws.sensors.redshift_cluster import RedshiftClusterSensor
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.amazon.aws.utils import set_env_id
 
+ENV_ID = set_env_id()
+DAG_ID = 'example_redshift_cluster'

Review Comment:
   I can't comment below, but this should be used in the "with DAG" block:
   
   with DAG(
       dag_id=DAG_ID,
       ....
       ) as dag:
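
   For reference, here is a minimal sketch of that pattern, assuming the usual system-test DAG boilerplate (every argument below other than dag_id is illustrative, not taken from the file under review):

       from datetime import datetime

       from airflow import DAG

       DAG_ID = 'example_redshift_cluster'

       # dag_id comes from the DAG_ID constant instead of a hard-coded string
       with DAG(
           dag_id=DAG_ID,
           schedule_interval='@once',
           start_date=datetime(2021, 1, 1),
           catchup=False,
           tags=['example'],
       ) as dag:
           ...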



##########
tests/system/providers/amazon/aws/redshift/example_redshift_to_s3.py:
##########
@@ -20,13 +20,16 @@
 
 from airflow import DAG
 from airflow.providers.amazon.aws.transfers.redshift_to_s3 import RedshiftToS3Operator
+from tests.system.providers.amazon.aws.utils import set_env_id
 
+ENV_ID = set_env_id()
+DAG_ID = 'example_redshift_to_s3'
 S3_BUCKET_NAME = getenv("S3_BUCKET_NAME", "s3_bucket_name")
 S3_KEY = getenv("S3_KEY", "s3_key")
 REDSHIFT_TABLE = getenv("REDSHIFT_TABLE", "redshift_table")
 
 with DAG(
-    dag_id="example_redshift_to_s3",
+    dag_id=DAG_ID,

Review Comment:
   You did it in this one :stuck_out_tongue: 



##########
tests/system/providers/amazon/aws/redshift/example_redshift_sql.py:
##########
@@ -21,6 +21,11 @@
 from airflow import DAG
 from airflow.models.baseoperator import chain
 from airflow.providers.amazon.aws.operators.redshift_sql import RedshiftSQLOperator
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.amazon.aws.utils import set_env_id
+
+ENV_ID = set_env_id()
+DAG_ID = 'example_redshift_sql'

Review Comment:
   I can't comment below, but this should be used in the "with DAG" block:
   
   with DAG(
       dag_id=DAG_ID,
       ....
       ) as dag:



##########
tests/system/providers/amazon/aws/redshift/example_redshift_data_execute_sql.py:
##########
@@ -22,7 +22,11 @@
 from airflow.decorators import task
 from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook
 from airflow.providers.amazon.aws.operators.redshift_data import RedshiftDataOperator
+from airflow.utils.trigger_rule import TriggerRule
+from tests.system.providers.amazon.aws.utils import set_env_id
 
+ENV_ID = set_env_id()
+DAG_ID = 'example_redshift_cluster'

Review Comment:
   I can't comment below, but this should be used in the "with DAG" block:
   
   with DAG(
       dag_id=DAG_ID,
       ....
       ) as dag:



##########
tests/system/providers/amazon/aws/redshift/example_redshift_data_execute_sql.py:
##########
@@ -39,7 +43,7 @@
 POLL_INTERVAL = 10
 
 
-@task(task_id="output_results")
+@task(task_id="output_results", trigger_rule=TriggerRule.ALL_DONE)

Review Comment:
   Nitpick: Is there a particular reason to change the name like that? If you name the method 'output_results', you don't need to explicitly set the task_id; you can drop the task_id assignment and it will generate one from the method name.
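
   For illustration, a minimal sketch of the shorter form (the function signature and body here are placeholders, not the real example's code):

       from airflow.decorators import task
       from airflow.utils.trigger_rule import TriggerRule

       # With no explicit task_id, the TaskFlow API derives the id from the
       # function name, so this still runs as task_id="output_results".
       @task(trigger_rule=TriggerRule.ALL_DONE)
       def output_results(statement_id):
           ...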


