potiuk commented on code in PR #23731:
URL: https://github.com/apache/airflow/pull/23731#discussion_r877427392
##########
tests/jobs/test_scheduler_job.py:
##########
@@ -4120,22 +4120,40 @@ def
test_task_with_upstream_skip_process_task_instances():
dr = dag.create_dagrun(run_type=DagRunType.MANUAL, state=State.RUNNING,
execution_date=DEFAULT_DATE)
assert dr is not None
+ # def get_ti_from_db(task):
+ # return (
+ # session.query(TaskInstance)
+ # .filter(
+ # TaskInstance.dag_id == dag.dag_id,
+ # TaskInstance.task_id == task.task_id,
+ # TaskInstance.run_id == dr.run_id,
+ # )
+ # .one()
+ # )
+
+ # with create_session() as session:
+ # get_ti_from_db(dummy1).state = State.SKIPPED
+ # get_ti_from_db(dummy2).state = State.SUCCESS
+ # session.flush()
+
with create_session() as session:
tis = {ti.task_id: ti for ti in dr.get_task_instances(session=session)}
# Set dummy1 to skipped and dummy2 to success. dummy3 remains as none.
tis[dummy1.task_id].state = State.SKIPPED
tis[dummy2.task_id].state = State.SUCCESS
assert tis[dummy3.task_id].state == State.NONE
+ session.commit()
+ # dr.refresh_from_db(session=session)
# dag_runs = DagRun.find(dag_id='test_task_with_upstream_skip_dag')
Review Comment:
Not really sure, I am afraid — those are pretty deep-dive tests, and they
might require quite a bit deeper knowledge (and time to look it up).
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]