This is an automated email from the ASF dual-hosted git repository.
vincbeck pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git
The following commit(s) were added to refs/heads/main by this push:
new 4dfe9f0d2f2 Decrease the batch inference size for example_bedrock_batch_inference (#57912)
4dfe9f0d2f2 is described below
commit 4dfe9f0d2f289e1bfd7c78eca8f08d1e4d02ec40
Author: Niko Oliveira <[email protected]>
AuthorDate: Thu Nov 6 06:30:33 2025 -0800
Decrease the batch inference size for example_bedrock_batch_inference (#57912)
The system test only needs to run the minimum batch size to exercise the
functionality. The shorter batch should decrease test runtime and
resource usage.

Also fix a bug that made the prompts list an unreliable size: a flush on
the temp file was missing.
---
.../tests/system/amazon/aws/example_bedrock_batch_inference.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/providers/amazon/tests/system/amazon/aws/example_bedrock_batch_inference.py b/providers/amazon/tests/system/amazon/aws/example_bedrock_batch_inference.py
index 207bce19da6..8e7d28b4e36 100644
--- a/providers/amazon/tests/system/amazon/aws/example_bedrock_batch_inference.py
+++ b/providers/amazon/tests/system/amazon/aws/example_bedrock_batch_inference.py
@@ -71,7 +71,7 @@ CLAUDE_MODEL_ID = "anthropic.claude-3-5-sonnet-20241022-v2:0"
 ANTHROPIC_VERSION = "bedrock-2023-05-31"
 # Batch inferences currently require a minimum of 100 prompts per batch.
-MIN_NUM_PROMPTS = 300
+MIN_NUM_PROMPTS = 100
 PROMPT_TEMPLATE = "Even numbers are red. Odd numbers are blue. What color is {n}?"
@@ -98,6 +98,9 @@ def generate_prompts(_env_id: str, _bucket: str, _key: str):
         # Convert each prompt to serialized json, append a newline, and write that line to the temp file.
         tmp_file.writelines(json.dumps(prompt) + "\n" for prompt in prompts)
+        # Flush the buffer to ensure all data is written to disk before upload
+        tmp_file.flush()
+
         # Upload the file to S3.
         S3Hook().conn.upload_file(tmp_file.name, _bucket, _key)
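
For context, a minimal standalone sketch of the pattern this commit fixes is below: prompts are
serialized to a NamedTemporaryFile as JSON Lines and the file is flushed before upload, since
buffered lines that have not reached disk would leave the uploaded object with fewer prompts than
expected. The bucket and key arguments, the simplified prompt dict, and the direct use of boto3
(standing in for Airflow's S3Hook().conn) are illustrative assumptions, not the system test's
actual code.

    import json
    from tempfile import NamedTemporaryFile

    import boto3

    MIN_NUM_PROMPTS = 100
    PROMPT_TEMPLATE = "Even numbers are red. Odd numbers are blue. What color is {n}?"


    def upload_prompts(bucket: str, key: str) -> None:
        # Simplified prompt records; the real system test builds full Bedrock model inputs.
        prompts = [{"prompt": PROMPT_TEMPLATE.format(n=n)} for n in range(MIN_NUM_PROMPTS)]

        with NamedTemporaryFile(mode="w") as tmp_file:
            # One serialized JSON object per line (JSON Lines).
            tmp_file.writelines(json.dumps(prompt) + "\n" for prompt in prompts)

            # Without this flush, buffered lines may still be in memory, so the
            # uploaded file can contain fewer prompts than MIN_NUM_PROMPTS.
            tmp_file.flush()

            # boto3 client stands in here for Airflow's S3Hook().conn.
            boto3.client("s3").upload_file(tmp_file.name, bucket, key)


    # Hypothetical usage:
    # upload_prompts("my-example-bucket", "bedrock/batch-input/prompts.jsonl")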