branch: externals/llm
commit 2716829ee9babb3277bbb0af02137b38d9b9d480
Author: Andrew Hyatt <[email protected]>
Commit: GitHub <[email protected]>
Add structured output to Claude Sonnet 4.5 and Opus 4.1; change default model
Changed the default model to Claude Sonnet 4.5 and added the Claude Opus 4.5 model.
Fixed the outdated default `max-tokens` values used for the newer models.
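With structured output enabled, callers can ask for a schema-conforming JSON
response through the normal llm API. A minimal sketch of the intended usage
(the environment variable, the prompt text, and the exact schema plist below
are illustrative assumptions, not part of this change):

    (require 'llm)
    (require 'llm-claude)

    ;; Uses the new default chat model, claude-sonnet-4-5.
    (defvar my-claude (make-llm-claude :key (getenv "ANTHROPIC_API_KEY")))

    ;; Because the model reports the `json-response' capability, the request
    ;; is sent with :output_format and the structured-outputs beta header.
    (llm-chat my-claude
              (llm-make-chat-prompt
               "Name the default Claude model, as JSON."
               :response-format '(:type "object"
                                  :properties (:model (:type "string"))
                                  :required ["model"])))

For models that do not report `json-response', the request and headers are
built exactly as before.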
---
NEWS.org | 4 ++++
llm-claude.el | 35 +++++++++++++++++++++++++----------
llm-models.el | 9 +++++++--
3 files changed, 36 insertions(+), 12 deletions(-)
diff --git a/NEWS.org b/NEWS.org
index 2f9900058d..4691fef088 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,6 +1,10 @@
* Version 0.27.4
- Fix bug (or perhaps breaking change) in Ollama tool use.
- Add Gemini 3 model, update Gemini code to pass thought signatures
+- Add =json-response= capability to Claude 4.5 and 4.1 Opus models
+- Set Sonnet 4.5 as the default Claude model
+- Fix outdated max output settings in Claude
+- Add Claude Opus 4.5
* Version 0.27.3
- Add reasoning output for Gemini.
- Add Claude 4.5 Sonnet and Haiku to support models, fix model matching for
other Claude models.
diff --git a/llm-claude.el b/llm-claude.el
index 6f108595db..13771b3a8a 100644
--- a/llm-claude.el
+++ b/llm-claude.el
@@ -34,7 +34,7 @@
;; Models defined at https://docs.anthropic.com/claude/docs/models-overview
(cl-defstruct (llm-claude (:include llm-standard-chat-provider))
(key nil :read-only t)
- (chat-model "claude-sonnet-4-0" :read-only t))
+ (chat-model "claude-sonnet-4-5" :read-only t))
(cl-defmethod llm-nonfree-message-info ((_ llm-claude))
"Return Claude's nonfree ToS."
@@ -59,8 +59,8 @@
:max_tokens ,(or (llm-chat-prompt-max-tokens prompt)
                 (cond ((string-match "opus-4-0" (llm-claude-chat-model provider))
                        32000)
-                      ((or (string-match "sonnet-4-0" (llm-claude-chat-model provider))
-                           (string-match "sonnet-3-7" (llm-claude-chat-model provider)))
+                      ((or (string-match "4" (llm-claude-chat-model provider))
+                           (string-match "3-7" (llm-claude-chat-model provider)))
                        64000)
                       ((string-match "opus" (llm-claude-chat-model provider))
                        4096)
@@ -95,6 +95,16 @@
(vconcat
(mapcar (lambda (f) (llm-claude--tool-call f))
(llm-chat-prompt-tools prompt))))))
+    (when (and (llm-chat-prompt-response-format prompt)
+               (member 'json-response (llm-capabilities provider))
+               (not (eq 'json (llm-chat-prompt-response-format prompt))))
+      (setq request (plist-put request :output_format
+                               `(:type "json_schema"
+                                 :schema ,(let ((schema (llm-chat-prompt-response-format prompt)))
+                                            (unless (plist-get schema :additionalProperties)
+                                              (setq schema (plist-put schema :additionalProperties :false)))
+                                            (llm-provider-utils-convert-to-serializable
+                                             schema))))))
(when (> (length system) 0)
(setq request (plist-put request :system system)))
(when (llm-chat-prompt-temperature prompt)
@@ -246,11 +256,13 @@ DATA is a vector of lists produced by `llm-provider-streaming-media-handler'."
(nreverse result)))
(cl-defmethod llm-provider-headers ((provider llm-claude))
- `(("x-api-key" . ,(if (functionp (llm-claude-key provider))
- (funcall (llm-claude-key provider))
- (llm-claude-key provider)))
- ("anthropic-version" . "2023-06-01")
- ("anthropic-beta" . "tools-2024-04-04")))
+ (append
+ `(("x-api-key" . ,(if (functionp (llm-claude-key provider))
+ (funcall (llm-claude-key provider))
+ (llm-claude-key provider)))
+ ("anthropic-version" . "2023-06-01"))
+ (when (member 'json-response (llm-capabilities provider))
+ '(("anthropic-beta" . "structured-outputs-2025-11-13")))))
(cl-defmethod llm-provider-chat-extract-error ((_ llm-claude) response)
(when-let ((err (assoc-default 'error response)))
@@ -272,8 +284,11 @@ DATA is a vector of lists produced by `llm-provider-streaming-media-handler'."
"Return the name of the provider."
"Claude")
-(cl-defmethod llm-capabilities ((_ llm-claude))
-  (list 'streaming 'tool-use 'streaming-tool-use 'image-input 'pdf-input 'reasoning))
+(cl-defmethod llm-capabilities ((provider llm-claude))
+ (seq-union
+ '(streaming tool-use streaming-tool-use image-input pdf-input reasoning)
+ (when-let* ((model (llm-models-match (llm-claude-chat-model provider))))
+ (llm-model-capabilities model))))
(cl-defmethod llm-provider-append-to-prompt ((_ llm-claude) prompt result
&optional tool-use-results)
diff --git a/llm-models.el b/llm-models.el
index 198a251261..d4f08525cd 100644
--- a/llm-models.el
+++ b/llm-models.el
@@ -148,7 +148,7 @@ REGEX is a regular expression that can be used to identify the model, uniquely (
;; https://docs.anthropic.com/en/docs/about-claude/models
(make-llm-model
:name "Claude 4.5 Sonnet" :symbol 'claude-4.5-sonnet
- :capabilities '(generation tool-use image-input pdf-input caching)
+ :capabilities '(generation tool-use image-input pdf-input caching json-response)
:context-length 200000
:regex "claude-sonnet-4-5")
(make-llm-model
@@ -156,9 +156,14 @@ REGEX is a regular expression that can be used to identify the model, uniquely (
:capabilities '(generation tool-use image-input pdf-input caching)
:context-length 200000
:regex "claude-haiku-4-5")
+ (make-llm-model
+ :name "Claude 4.5 Opus" :symbol 'claude-4-5-opus
+ :capabilities '(generation tool-use image-input pdf-input caching json-response)
+ :context-length 200000
+ :regex "claude-opus-4-5")
(make-llm-model
:name "Claude 4.1 Opus" :symbol 'claude-4-1-opus
- :capabilities '(generation tool-use image-input pdf-input caching)
+ :capabilities '(generation tool-use image-input pdf-input caching json-response)
:context-length 200000
:regex "claude-opus-4-1")
(make-llm-model