uranusjr commented on code in PR #32538:
URL: https://github.com/apache/airflow/pull/32538#discussion_r1260472292
##########
airflow/operators/python.py:
##########
@@ -250,25 +251,35 @@ def execute(self, context: Context) -> Any:
if condition:
self.log.info("Proceeding with downstream tasks...")
return condition
-
- downstream_tasks = context["task"].get_flat_relatives(upstream=False)
+ downstream_tasks = [
+ x for x in context["task"].get_flat_relatives(upstream=False) if
not x.is_teardown
+ ]
self.log.debug("Downstream task IDs %s", downstream_tasks)
if downstream_tasks:
dag_run = context["dag_run"]
execution_date = dag_run.execution_date
-
+ if TYPE_CHECKING:
+ assert isinstance(execution_date, DateTime)
if self.ignore_downstream_trigger_rules is True:
self.log.info("Skipping all downstream tasks...")
- self.skip(dag_run, execution_date, downstream_tasks,
map_index=context["ti"].map_index)
+ self.skip(
+ dag_run=dag_run,
+ execution_date=execution_date,
+ tasks=downstream_tasks,
+ map_index=context["ti"].map_index,
+ )
else:
self.log.info("Skipping downstream tasks while respecting
trigger rules...")
# Explicitly setting the state of the direct, downstream
task(s) to "skipped" and letting the
# Scheduler handle the remaining downstream task(s)
appropriately.
+ to_skip = [
+ x for x in
context["task"].get_direct_relatives(upstream=False) if not x.is_teardown
+ ]
Review Comment:
I wonder whether, instead of fetching the tasks again (which is surprisingly
not that trivial), it may make more sense to simply filter from
`downstream_tasks`:
```python
to_skip = [t for t in downstream_tasks if self.task_id in
t.upstream_task_ids]
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]