branch: externals/llm
commit 1fb8511867d46ad461cccb8dab8910e6ebddffdf
Author: Andrew Hyatt <ahy...@gmail.com>
Commit: Andrew Hyatt <ahy...@gmail.com>

    Use media type only in the llm-request-plz-async call
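
    Callers now pass the media type explicitly.  A minimal sketch of the
    new calling convention (url, data, and the handler functions are
    placeholders, not helpers from this library):

        (llm-request-plz-async url
                               :data data  ; alist; JSON-encoded by the helper
                               :media-type '(application/json)  ; now required
                               :on-success #'my-on-success  ; placeholder callback
                               :on-error #'my-on-error)     ; placeholder callback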
---
 llm-gemini.el      |  2 ++
 llm-ollama.el      | 29 ++++++++++++++++-------------
 llm-openai.el      | 31 +++++++++++++++++--------------
 llm-request-plz.el | 15 +++++----------
 llm-vertex.el      |  2 ++
 5 files changed, 42 insertions(+), 37 deletions(-)

diff --git a/llm-gemini.el b/llm-gemini.el
index 95735c55ca..9915601b30 100644
--- a/llm-gemini.el
+++ b/llm-gemini.el
@@ -65,6 +65,7 @@ You can get this at https://makersuite.google.com/app/apikey."
   (let ((buf (current-buffer)))
     (llm-request-plz-async (llm-gemini--embedding-url provider)
                           :data (llm-gemini--embedding-request provider string)
+                           :media-type '(application/json)
                            :on-success (lambda (data)
                                          (llm-request-callback-in-buffer
                                           buf vector-callback (llm-gemini--embedding-response-handler data)))
@@ -111,6 +112,7 @@ If STREAMING-P is non-nil, use the streaming endpoint."
   (let ((buf (current-buffer)))
     (llm-request-plz-async (llm-gemini--chat-url provider nil)
                            :data (llm-gemini--chat-request prompt)
+                           :media-type '(application/json)
                            :on-success (lambda (data)
                                          (llm-request-callback-in-buffer
                                           buf response-callback
diff --git a/llm-ollama.el b/llm-ollama.el
index 1a31503133..e36701ff25 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -86,6 +86,7 @@ PROVIDER is the llm-ollama provider."
   (let ((buf (current-buffer)))
     (llm-request-plz-async (llm-ollama--url provider "embeddings")
                           :data (llm-ollama--embedding-request provider string)
+                           :media-type '(application/json)
                            :on-success (lambda (data)
                                          (llm-request-callback-in-buffer
                                           buf vector-callback (llm-ollama--embedding-extract-response data)))
@@ -144,19 +145,21 @@ STREAMING is a boolean to control whether to stream the response."
 
(cl-defmethod llm-chat-async ((provider llm-ollama) prompt response-callback error-callback)
   (let ((buf (current-buffer)))
-    (llm-request-plz-async (llm-ollama--url provider "chat")
-      :data (llm-ollama--chat-request provider prompt nil)
-      :timeout llm-ollama-chat-timeout
-      :on-success (lambda (data)
-                    (let ((output (llm-ollama--get-response data)))
-                      (llm-provider-utils-append-to-prompt prompt data)
-      :on-success (lambda (data)
-                    (let ((output (llm-ollama--get-response data)))
-                      (llm-provider-utils-append-to-prompt prompt data)
-                      (llm-request-plz-callback-in-buffer buf response-callback output)))
-      :on-error (lambda (_ data)
-                  (let ((errdata (cdr (assoc 'error data))))
-                    (llm-request-plz-callback-in-buffer buf error-callback 'error
-                             (format "Problem calling Ollama: %s message: %s"
-                                     (cdr (assoc 'type errdata))
-                                     (cdr (assoc 'message errdata)))))))))
+    (llm-request-plz-async
+     (llm-ollama--url provider "chat")
+     :data (llm-ollama--chat-request provider prompt nil)
+     :media-type '(application/json)
+     :timeout llm-ollama-chat-timeout
+     :on-success (lambda (data)
+                   (let ((output (llm-ollama--get-response data)))
+                     (llm-provider-utils-append-to-prompt prompt data)
+                     (llm-request-plz-callback-in-buffer buf response-callback output)))
+     :on-error (lambda (_ data)
+                 (let ((errdata (cdr (assoc 'error data))))
+                   (llm-request-plz-callback-in-buffer buf error-callback 'error
+                                                       (format "Problem calling Ollama: %s message: %s"
+                                                               (cdr (assoc 'type errdata))
+                                                               (cdr (assoc 'message errdata)))))))))
 
(cl-defmethod llm-chat-streaming ((provider llm-ollama) prompt partial-callback response-callback error-callback)
   (let ((buf (current-buffer))
diff --git a/llm-openai.el b/llm-openai.el
index 17ecc30d68..28396ce045 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -116,6 +116,7 @@ This is just the key, if it exists."
   (llm-openai--check-key provider)
   (let ((buf (current-buffer)))
     (llm-request-plz-async (llm-openai--url provider "embeddings")
+                           :media-type '(application/json)
                            :headers (llm-openai--headers provider)
                           :data (llm-openai--embedding-request (llm-openai-embedding-model provider) string)
                            :on-success (lambda (data)
@@ -226,20 +227,22 @@ PROMPT is the prompt that needs to be updated with the response."
(cl-defmethod llm-chat-async ((provider llm-openai) prompt response-callback error-callback)
   (llm-openai--check-key provider)
   (let ((buf (current-buffer)))
-    (llm-request-plz-async (llm-openai--url provider "chat/completions")
-      :headers (llm-openai--headers provider)
-      :data (llm-openai--chat-request (llm-openai-chat-model provider) prompt)
-      :on-success (lambda (data)
-                    (llm-request-plz-callback-in-buffer
-                       buf response-callback
-                       (llm-openai--process-and-return
-                        provider prompt data error-callback)))
-      :on-error (lambda (_ data)
-                  (let ((errdata (cdr (assoc 'error data))))
-                    (llm-request-plz-callback-in-buffer buf error-callback 'error
-                             (format "Problem calling Open AI: %s message: %s"
-                                     (cdr (assoc 'type errdata))
-                                     (cdr (assoc 'message errdata)))))))))
+    (llm-request-plz-async
+     (llm-openai--url provider "chat/completions")
+     :media-type '(application/json)
+     :headers (llm-openai--headers provider)
+     :data (llm-openai--chat-request (llm-openai-chat-model provider) prompt)
+     :on-success (lambda (data)
+                   (llm-request-plz-callback-in-buffer
+                    buf response-callback
+                    (llm-openai--process-and-return
+                     provider prompt data error-callback)))
+     :on-error (lambda (_ data)
+                 (let ((errdata (cdr (assoc 'error data))))
+                   (llm-request-plz-callback-in-buffer buf error-callback 'error
+                                                       (format "Problem calling Open AI: %s message: %s"
+                                                               (cdr (assoc 'type errdata))
+                                                               (cdr (assoc 'message errdata)))))))))
 
 (cl-defmethod llm-chat ((provider llm-openai) prompt)
   (llm-openai--check-key provider)
diff --git a/llm-request-plz.el b/llm-request-plz.el
index 1342f47ebb..a9cf9f63e9 100644
--- a/llm-request-plz.el
+++ b/llm-request-plz.el
@@ -131,7 +131,7 @@ and required otherwise.
 ON-ERROR will be called with the error code and a response-body.
 This is required.
 
-MEDIA-TYPE is an optional argument that sets a media type, useful
+MEDIA-TYPE is a required argument that sets a media type, useful
 for streaming formats.  It is expected that this is only used by
 other methods in this file.
 
@@ -139,22 +139,17 @@ ON-SUCCESS-RAW, if set, will be called in the buffer with the
 response body, and expect the response content. This is an
 optional argument, and mostly useful for streaming.  If not set,
 the buffer is turned into JSON and passed to ON-SUCCESS."
+  (unless media-type
+    (error "MEDIA-TYPE is required in llm-request-plz-async"))
   (plz-media-type-request
     'post url
-    :as (if media-type
-            `(media-types ,(cons media-type plz-media-types))
-            'string)
+    :as `(media-types ,(cons media-type plz-media-types))
     :body (when data
             (encode-coding-string (json-encode data) 'utf-8))
     :headers (append headers
                      '(("Content-Type" . "application/json")))
     :then (lambda (response)
-            ;; Media types can return a response object sometimes, otherwise it
-            ;; is a string.  This is normal, since this is dependent on the
-            ;; `:as' argument.
-            (let ((response (if (plz-response-p response)
-                                (plz-response-body response)
-                              response)))
+            (let ((response (plz-response-body response)))
               (when on-success-raw
                 (funcall on-success-raw response))
               (when on-success
diff --git a/llm-vertex.el b/llm-vertex.el
index a965fdcb5d..ec16a2462d 100644
--- a/llm-vertex.el
+++ b/llm-vertex.el
@@ -130,6 +130,7 @@ KEY-GENTIME keeps track of when the key was generated, because the key must be r
      (llm-vertex--embedding-url provider)
     :headers `(("Authorization" . ,(format "Bearer %s" (llm-vertex-key provider))))
      :data `(("instances" . [(("content" . ,string))]))
+     :media-type '(application/json)
      :on-success (lambda (data)
                    (llm-request-callback-in-buffer
                    buf vector-callback (llm-vertex--embedding-extract-response data)))
@@ -301,6 +302,7 @@ If STREAMING is non-nil, use the URL for the streaming API."
      (llm-vertex--chat-url provider)
     :headers `(("Authorization" . ,(format "Bearer %s" (llm-vertex-key provider))))
      :data (llm-vertex--chat-request prompt)
+     :media-type '(application/json)
      :on-success (lambda (data)
                    (llm-request-callback-in-buffer
                     buf response-callback
