pierrejeambrun commented on code in PR #43875:
URL: https://github.com/apache/airflow/pull/43875#discussion_r1856845498


##########
tests/api_fastapi/core_api/routes/public/test_dag_run.py:
##########
@@ -652,3 +658,308 @@ def test_clear_dag_run_unprocessable_entity(self, test_client):
         body = response.json()
         assert body["detail"][0]["msg"] == "Field required"
         assert body["detail"][0]["loc"][0] == "body"
+
+
+# @pytest.mark.no_setup
+class TestTriggerDagRun:
+    def _dags_for_trigger_tests(self, session=None):
+        inactive_dag = DagModel(
+            dag_id="inactive",
+            fileloc="/tmp/dag_del_1.py",
+            timetable_summary="2 2 * * *",
+            is_active=False,
+            is_paused=True,
+            owners="test_owner,another_test_owner",
+            next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc),
+        )
+
+        import_errors_dag = DagModel(
+            dag_id="import_errors",
+            fileloc="/tmp/dag_del_2.py",
+            timetable_summary="2 2 * * *",
+            is_active=True,
+            owners="test_owner,another_test_owner",
+            next_dagrun=datetime(2021, 1, 1, 12, 0, 0, tzinfo=timezone.utc),
+        )
+        import_errors_dag.has_import_errors = True
+
+        session.add(inactive_dag)
+        session.add(import_errors_dag)
+        session.commit()
+
+    @time_machine.travel(timezone.utcnow(), tick=False)
+    @pytest.mark.parametrize(
+        "dag_run_id, note, data_interval_start, data_interval_end",
+        [
+            ("dag_run_5", "test-note", None, None),
+            (
+                "dag_run_6",
+                "test-note",
+                "2024-01-03T00:00:00+00:00",
+                "2024-01-04T05:00:00+00:00",
+            ),
+            (None, None, None, None),
+        ],
+    )
+    def test_should_respond_200(
+        self,
+        test_client,
+        dag_run_id,
+        note,
+        data_interval_start,
+        data_interval_end,
+    ):
+        fixed_now = timezone.utcnow().isoformat()
+
+        request_json = {"note": note}
+        if dag_run_id is not None:
+            request_json["dag_run_id"] = dag_run_id
+        if data_interval_start is not None:
+            request_json["data_interval_start"] = data_interval_start
+        if data_interval_end is not None:
+            request_json["data_interval_end"] = data_interval_end
+
+        response = test_client.post(
+            f"/public/dags/{DAG1_ID}/dagRuns",
+            json={
+                "dag_run_id": dag_run_id,
+                "note": note,
+                "data_interval_start": data_interval_start,
+                "data_interval_end": data_interval_end,
+            },
+        )
+        assert response.status_code == 200
+
+        if dag_run_id is None:
+            expected_dag_run_id = f"manual__{fixed_now}"
+        else:
+            expected_dag_run_id = dag_run_id
+
+        expected_data_interval_start = fixed_now.replace("+00:00", "Z")
+        expected_data_interval_end = fixed_now.replace("+00:00", "Z")
+        if data_interval_start is not None and data_interval_end is not None:
+            expected_data_interval_start = data_interval_start.replace("+00:00", "Z")
+            expected_data_interval_end = data_interval_end.replace("+00:00", "Z")
+
+        expected_response_json = {
+            "conf": {},
+            "dag_id": DAG1_ID,
+            "run_id": expected_dag_run_id,
+            "end_date": None,
+            "logical_date": fixed_now.replace("+00:00", "Z"),
+            "external_trigger": True,
+            "start_date": None,
+            "state": "queued",
+            "data_interval_end": expected_data_interval_end,
+            "data_interval_start": expected_data_interval_start,
+            "queued_at": fixed_now.replace("+00:00", "Z"),
+            "last_scheduling_decision": None,
+            "run_type": "manual",
+            "note": note,
+            "triggered_by": "rest_api",
+        }
+
+        assert response.json() == expected_response_json
+
+    @pytest.mark.parametrize(
+        "post_body, expected_detail",
+        [
+            (
+                {"executiondate": "2020-11-10T08:25:56Z"},
+                {
+                    "detail": [
+                        {
+                            "input": "2020-11-10T08:25:56Z",
+                            "loc": ["body", "executiondate"],
+                            "msg": "Extra inputs are not permitted",
+                            "type": "extra_forbidden",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"logical_date": "2020-11-10T08:25:56"},
+                {
+                    "detail": [
+                        {
+                            "input": "2020-11-10T08:25:56",
+                            "loc": ["body", "logical_date"],
+                            "msg": "Extra inputs are not permitted",
+                            "type": "extra_forbidden",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"data_interval_start": "2020-11-10T08:25:56"},
+                {
+                    "detail": [
+                        {
+                            "input": "2020-11-10T08:25:56",
+                            "loc": ["body", "data_interval_start"],
+                            "msg": "Input should have timezone info",
+                            "type": "timezone_aware",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"data_interval_end": "2020-11-10T08:25:56"},
+                {
+                    "detail": [
+                        {
+                            "input": "2020-11-10T08:25:56",
+                            "loc": ["body", "data_interval_end"],
+                            "msg": "Input should have timezone info",
+                            "type": "timezone_aware",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"dag_run_id": 20},
+                {
+                    "detail": [
+                        {
+                            "input": 20,
+                            "loc": ["body", "dag_run_id"],
+                            "msg": "Input should be a valid string",
+                            "type": "string_type",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"note": 20},
+                {
+                    "detail": [
+                        {
+                            "input": 20,
+                            "loc": ["body", "note"],
+                            "msg": "Input should be a valid string",
+                            "type": "string_type",
+                        }
+                    ]
+                },
+            ),
+            (
+                {"conf": 20},
+                {
+                    "detail": [
+                        {
+                            "input": 20,
+                            "loc": ["body", "conf"],
+                            "msg": "Input should be a valid dictionary",
+                            "type": "dict_type",
+                        }
+                    ]
+                },
+            ),
+        ],
+    )
+    def test_invalid_data(self, test_client, post_body, expected_detail):
+        response = test_client.post(f"/public/dags/{DAG1_ID}/dagRuns", json=post_body)
+        assert response.status_code == 422
+        assert response.json() == expected_detail
+
+    @mock.patch("airflow.models.DAG.create_dagrun")
+    def test_dagrun_creation_exception_is_handled(self, mock_create_dagrun, test_client):
+        error_message = "Encountered Error"
+
+        mock_create_dagrun.side_effect = ValueError(error_message)
+
+        response = test_client.post(f"/public/dags/{DAG1_ID}/dagRuns", json={})
+        assert response.status_code == 400
+        assert response.json() == {"detail": error_message}
+
+    def test_should_respond_404_if_a_dag_is_inactive(self, test_client, session):
+        self._dags_for_trigger_tests(session)
+        response = test_client.post("/public/dags/inactive/dagRuns", json={})
+        assert response.status_code == 404
+        assert response.json()["detail"] == "DAG with dag_id: 'inactive' not found"
+
+    def test_should_respond_400_if_a_dag_has_import_errors(self, test_client, session):
+        self._dags_for_trigger_tests(session)
+        response = test_client.post("/public/dags/import_errors/dagRuns", json={})
+        assert response.status_code == 400
+        assert (
+            response.json()["detail"]
+            == "DAG with dag_id: 'import_errors' has import errors and cannot be triggered"
+        )
+
+    @time_machine.travel(timezone.utcnow(), tick=False)
+    def test_should_response_200_for_duplicate_logical_date(self, test_client):
+        RUN_ID_1 = "random_1"
+        RUN_ID_2 = "random_2"
+        now = timezone.utcnow().isoformat().replace("+00:00", "Z")
+        note = "duplicate logical date test"
+        response_1 = test_client.post(
+            f"/public/dags/{DAG1_ID}/dagRuns",
+            json={"dag_run_id": RUN_ID_1, "note": note},
+        )
+        response_2 = test_client.post(
+            f"/public/dags/{DAG1_ID}/dagRuns",
+            json={"dag_run_id": RUN_ID_2, "note": note},
+        )
+
+        assert response_1.status_code == response_2.status_code == 200
+        body1 = response_1.json()
+        body2 = response_2.json()
+
+        for each_run_id, each_body in [(RUN_ID_1, body1), (RUN_ID_2, body2)]:
+            assert each_body == {
+                "run_id": each_run_id,
+                "dag_id": DAG1_ID,
+                "logical_date": now,
+                "queued_at": now,
+                "start_date": None,
+                "end_date": None,
+                "data_interval_start": now,
+                "data_interval_end": now,
+                "last_scheduling_decision": None,
+                "run_type": "manual",
+                "state": "queued",
+                "external_trigger": True,
+                "triggered_by": "rest_api",
+                "conf": {},
+                "note": note,
+            }
+
+    @pytest.mark.parametrize(
+        "data_interval_start, data_interval_end",
+        [
+            (
+                LOGICAL_DATE1.isoformat(),
+                None,
+            ),
+            (
+                None,
+                LOGICAL_DATE1.isoformat(),
+            ),
+        ],
+    )
+    def test_should_response_422_for_missing_start_date_or_end_date(

Review Comment:
   test_raises_validation_error_for_invalid_params



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to