kaxil commented on a change in pull request #16233:
URL: https://github.com/apache/airflow/pull/16233#discussion_r649781077



##########
File path: airflow/www/views.py
##########
@@ -1807,54 +1808,104 @@ def _mark_task_instance_state(  # pylint: disable=too-many-arguments
 
         latest_execution_date = dag.get_latest_execution_date()
         if not latest_execution_date:
-            flash(f"Cannot make {state}, seem that dag {dag_id} has never 
run", "error")
+            flash(f"Cannot mark tasks as {state}, seem that dag {dag_id} has 
never run", "error")
             return redirect(origin)
 
         execution_date = timezone.parse(execution_date)
 
         from airflow.api.common.experimental.mark_tasks import set_state
 
-        if confirmed:
-            with create_session() as session:
-                altered = set_state(
-                    tasks=[task],
-                    execution_date=execution_date,
-                    upstream=upstream,
-                    downstream=downstream,
-                    future=future,
-                    past=past,
-                    state=state,
-                    commit=True,
-                    session=session,
-                )
+        with create_session() as session:
+            altered = set_state(
+                tasks=[task],
+                execution_date=execution_date,
+                upstream=upstream,
+                downstream=downstream,
+                future=future,
+                past=past,
+                state=state,
+                commit=True,
+                session=session,
+            )
 
-                # Clear downstream tasks that are in failed/upstream_failed state to resume them.
-                # Flush the session so that the tasks marked success are reflected in the db.
-                session.flush()
-                subdag = dag.partial_subset(
-                    task_ids_or_regex={task_id},
-                    include_downstream=True,
-                    include_upstream=False,
-                )
+            # Clear downstream tasks that are in failed/upstream_failed state to resume them.
+            # Flush the session so that the tasks marked success are reflected in the db.
+            session.flush()
+            subdag = dag.partial_subset(
+                task_ids_or_regex={task_id},
+                include_downstream=True,
+                include_upstream=False,
+            )
 
-                end_date = execution_date if not future else None
-                start_date = execution_date if not past else None
-
-                subdag.clear(
-                    start_date=start_date,
-                    end_date=end_date,
-                    include_subdags=True,
-                    include_parentdag=True,
-                    only_failed=True,
-                    session=session,
-                    # Exclude the task itself from being cleared
-                    exclude_task_ids={task_id},
-                )
+            end_date = execution_date if not future else None
+            start_date = execution_date if not past else None
 
-                session.commit()
+            subdag.clear(
+                start_date=start_date,
+                end_date=end_date,
+                include_subdags=True,
+                include_parentdag=True,
+                only_failed=True,
+                session=session,
+                # Exclude the task itself from being cleared
+                exclude_task_ids={task_id},
+            )
 
-            flash(f"Marked {state} on {len(altered)} task instances")
-            return redirect(origin)
+            session.commit()
+
+        flash(f"Marked {state} on {len(altered)} task instances")
+        return redirect(origin)
+
+    @expose('/confirm', methods=['GET'])
+    @auth.has_access(
+        [
+            (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
+            (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_TASK_INSTANCE),
+        ]
+    )
+    @action_logging
+    def confirm(self):
+        """Show confirmation page for marking tasks as success or failed."""
+        args = request.args
+        dag_id = args.get('dag_id')
+        task_id = args.get('task_id')
+        execution_date = args.get('execution_date')
+        state = args.get('state')
+
+        upstream = to_boolean(args.get('failed_upstream'))
+        downstream = to_boolean(args.get('failed_downstream'))
+        future = to_boolean(args.get('failed_future'))
+        past = to_boolean(args.get('failed_past'))
+
+        try:
+            dag = current_app.dag_bag.get_dag(dag_id)
+        except airflow.exceptions.SerializedDagNotFound:
+            flash(f'DAG {dag_id} not found', "error")
+            return redirect(request.referrer or url_for('Airflow.index'))
+
+        try:
+            task = dag.get_task(task_id)
+        except airflow.exceptions.TaskNotFound:
+            flash(f"Task {task_id} not found", "error")
+            return redirect(request.referrer or url_for('Airflow.index'))
+
+        task.dag = dag
+
+        if state not in (
+            'success',
+            'failed',
+        ):
+            flash(f"Invalid state {state}, must be either 'success' or 
'failed'", "error")
+            return redirect(request.referrer or url_for('Airflow.index'))
+
+        latest_execution_date = dag.get_latest_execution_date()
+        if not latest_execution_date:
+            flash(f"Cannot mark tasks as {state}, seem that dag {dag_id} has 
never run", "error")
+            return redirect(request.referrer or url_for('Airflow.index'))
+
+        execution_date = timezone.parse(execution_date)
+
+        from airflow.api.common.experimental.mark_tasks import set_state

Review comment:
       I see some usages of the experimental API in this file (unrelated to this PR). We should replace those with the stable API.
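
       For example, a minimal sketch of the swap (the stable module path below is an assumption on my part, since this PR does not introduce it; the signature is taken from the call sites above):

```python
# Today (experimental import, as used in airflow/www/views.py):
from airflow.api.common.experimental.mark_tasks import set_state

# Suggested (stable import) -- hypothetical module path, shown only to
# illustrate the swap:
# from airflow.api.common.mark_tasks import set_state

# Assuming the stable helper keeps the same signature, the existing call
# sites would not need to change (task, execution_date, the upstream /
# downstream / future / past flags, state and session all come from the
# enclosing view handler):
altered = set_state(
    tasks=[task],
    execution_date=execution_date,
    upstream=upstream,
    downstream=downstream,
    future=future,
    past=past,
    state=state,
    commit=True,
    session=session,
)
```

       Both `_mark_task_instance_state` and the new `confirm` endpoint do this import inline, so there are at least two call sites to update.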



