pierrejeambrun commented on code in PR #50132:
URL: https://github.com/apache/airflow/pull/50132#discussion_r2222713398
##########
airflow-ctl/tests/airflow_ctl/api/test_operations.py:
##########
@@ -106,6 +109,11 @@ def make_api_client(
return Client(base_url=base_url, transport=transport, token=token,
kind=kind)
+class HelloCollectionResponse(BaseModel):
+ hello: list[str]
Review Comment:
```suggestion
hellos: list[str]
```
##########
airflow-ctl/src/airflowctl/api/operations.py:
##########
@@ -148,6 +150,33 @@ def __init_subclass__(cls, **kwargs):
if callable(value):
setattr(cls, attr,
_check_flag_and_exit_if_server_response_error(value))
+ def return_all_entries(
+ self,
+ *,
+ path: str,
+ total_entries: int,
+ data_model: type[BaseModel],
+ entry_list: list,
+ offset: int = 0,
+ limit: int = 50,
+ params: dict | None = None,
+ **kwargs,
+ ) -> list | ServerResponseError:
+ if params is None:
+ params = {}
+ params.update({"limit": limit}, **kwargs)
+ try:
+ while offset < total_entries:
+ params.update({"offset": offset})
+ self.response = self.client.get(path, params=params)
+ entry = data_model.model_validate_json(self.response.content)
+ offset = offset + limit # default limit params = 50
+ entry_list.append(entry)
+
+ return entry_list
+ except ServerResponseError as e:
+ raise e
Review Comment:
Ping to fix this.
##########
airflow-ctl/tests/airflow_ctl/api/test_operations.py:
##########
@@ -114,6 +122,56 @@ def test_server_connection_refused(self):
):
client.connections.get("1")
+ @pytest.mark.parametrize(
+ "total_entries, offset, limit, expected_response",
+ [
+ (0, 0, 50, []),
+ (1, 0, 50, [HelloCollectionResponse(hello=["hello"],
total_entries=1)]),
+ (3, 2, 50, [HelloCollectionResponse(hello=["hello"],
total_entries=3)]),
+ (
+ 20,
+ 5,
+ 5,
+ [
+ ("hello", ["hello"]),
+ ("total_entries", 20),
+ ("hello", ["hello"]),
+ ("total_entries", 20),
+ ("hello", ["hello"]),
+ ("total_entries", 20),
+ ],
+ ),
+ (2, 3, 50, []),
+ ],
+ )
+ def test_execute_list(self, total_entries, limit, offset,
expected_response):
+ mock_operation = MagicMock(spec=BaseOperations)
+ mocked_response = []
+ total_entries = total_entries
+ if total_entries < limit:
+ if offset < total_entries:
+ if offset == 0:
+
mocked_response.extend([HelloCollectionResponse(hello=["hello"],
total_entries=1)])
+ mock_operation.execute_list.return_value = mocked_response
+ elif offset > 0:
+
mocked_response.extend([HelloCollectionResponse(hello=["hello"],
total_entries=3)])
+ mock_operation.execute_list.return_value = mocked_response
+ else:
+ mocked_response.extend([])
+ mock_operation.execute_list.return_value = mocked_response
+ else:
+ while offset < total_entries:
+ response = HelloCollectionResponse(hello=["hello"],
total_entries=total_entries)
+ mocked_response.extend(response)
+ offset += limit
+ mock_operation.execute_list.return_value = mocked_response
+ assert (
+ mock_operation.execute_list(
+ path="", data_model=HelloCollectionResponse, offset=offset,
limit=limit
+ )
+ == expected_response
Review Comment:
This is super complicated for no reason and needs some simplification. For
all cases, iterate over the number of pages, and add a response corresponding
to each page. No need to make separate cases for limit, offset, total_entries,
etc... (You'll get `(total_entries // page_size) + 1` pages, and you create the
appropriate response for each of them.)
Also, `hello=["hello"]` is most likely not correct, because that list should
probably have length `page_size`.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]