branch: externals/llm
commit 2baae21ba821f246cfb257ab62ae016e246a8e8e
Author: Andrew Hyatt <[email protected]>
Commit: GitHub <[email protected]>

    Fixed bad interaction lines that were added in Ollama tool calling (#227)
    
    Also improve `llm-chat-prompt-to-text` for better prompt debugging.
    
    This should resolve the issue noted in
    https://github.com/ahyatt/llm/issues/224#issuecomment-3690468752
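
    A minimal sketch of exercising the improved debug output (the prompt text
    is illustrative, and `my-provider` stands in for an already-configured
    provider):

        (let ((prompt (llm-make-chat-prompt "What is the capital of France?")))
          ;; A synchronous round trip; the reply (and any tool calls or tool
          ;; results) is appended to the prompt's interactions.
          (llm-chat my-provider prompt)
          ;; Tool-result interactions now show up as "[Tool: NAME, Result: ...]"
          ;; entries in the debug text.
          (message "%s" (llm-chat-prompt-to-text prompt)))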
---
 NEWS.org              |  2 ++
 llm-ollama.el         | 69 ++++++++++++++++++++++++++++++++-------------------
 llm-provider-utils.el |  3 ++-
 llm.el                | 34 +++++++++++++++++--------
 4 files changed, 72 insertions(+), 36 deletions(-)

diff --git a/NEWS.org b/NEWS.org
index 45f49bbe6d..b0e51ebc7b 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,4 +1,6 @@
 * Version 0.28.4
+- Removed bad interactions made in Ollama tool calls
+- Fixed Ollama tool calling requests
 - Fixed Ollama reasoning, whose API has changed
 * Version 0.28.3
 - Fixed breakage in Ollama streaming tool calling
diff --git a/llm-ollama.el b/llm-ollama.el
index deca6898ed..54768dafe8 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -129,26 +129,52 @@ PROVIDER is the llm-ollama provider."
   (llm-provider-utils-combine-to-system-prompt prompt llm-ollama-example-prelude)
   (let (request-plist messages options)
     (setq messages
-          (vconcat (mapcar (lambda (interaction)
+          (vconcat (mapcan (lambda (interaction)
                              (let* ((role (llm-chat-prompt-interaction-role interaction))
                                     (content (llm-chat-prompt-interaction-content interaction))
-                                    (content-text "")
+                                    (tool-results (llm-chat-prompt-interaction-tool-results interaction))
+                                    (tool-call-p (and (listp content)
+                                                      (llm-provider-utils-tool-use-p (car content))))
                                     (images nil))
-                               (if (stringp content)
-                                   (setq content-text content)
-                                 (if (eq 'user role)
-                                     (dolist (part (llm-multipart-parts content))
-                                       (if (llm-media-p part)
-                                           (setq images (append images (list part)))
-                                         (setq content-text (concat content-text part))))
-                                   (setq content-text (json-serialize content))))
-                               (append
-                                `(:role ,(symbol-name role)
-                                        :content ,content-text)
-                                (when images
-                                  `(:images
-                                    ,(vconcat (mapcar (lambda (img) (base64-encode-string (llm-media-data img) t))
-                                                      images)))))))
+                               ;; Tool results expand to one tool line per
+                               ;; result, so multiple lines per this
+                               ;; interaction.
+                               (if tool-results
+                                   (mapcar (lambda (r)
+                                             `(:role "tool"
+                                                     :tool_name ,(llm-chat-prompt-tool-result-tool-name r)
+                                                     :content ,(llm-chat-prompt-tool-result-result r)))
+                                           tool-results)
+                                 (list (append
+                                        `(:role
+                                          ,(symbol-name role)
+                                          ,(if tool-call-p :tool_calls :content)
+                                          ,(cond
+                                            ((stringp content) content)
+                                            (tool-call-p
+                                             (cl-loop for tool in content
+                                                      and index from 0
+                                                      vconcat
+                                                      `((:type "function"
+                                                               :function
+                                                               (:index ,index
+                                                                       :name
+                                                                       ,(llm-provider-utils-tool-use-name tool)
+                                                                       :arguments
+                                                                       ,(llm-provider-utils-tool-use-args tool))))))
+                                            ((llm-multipart-p content)
+                                             (cl-loop for part in (llm-multipart-parts content) do
+                                                      (when (llm-media-p part)
+                                                        (setq images (append images (list part))))
+                                                      concat
+                                                      (if (llm-media-p part)
+                                                          ""
+                                                        part)))
+                                            (t (json-serialize content))))
+                                        (when images
+                                          `(:images
+                                            ,(vconcat (mapcar (lambda (img) (base64-encode-string (llm-media-data img) t))
+                                                              images)))))))))
                            (llm-chat-prompt-interactions prompt))))
     (setq request-plist (plist-put request-plist :messages messages))
     (setq request-plist (plist-put request-plist :model (llm-ollama-chat-model provider)))
@@ -191,14 +217,7 @@ PROVIDER is the llm-ollama provider."
           (assoc-default 'tool_calls (assoc-default 'message response))))
 
 (cl-defmethod llm-provider-populate-tool-uses ((_ llm-ollama) prompt tool-uses)
-  (llm-provider-utils-append-to-prompt
-   prompt
-   (vconcat (mapcar (lambda (tool-use)
-                      `(:function (:name ,(llm-provider-utils-tool-use-name tool-use)
-                                         :arguments ,(json-serialize
-                                                      (llm-provider-utils-tool-use-args tool-use)
-                                                      :false-object :json-false))))
-                    tool-uses))))
+  (llm-provider-utils-append-to-prompt prompt tool-uses))
 
 (cl-defmethod llm-provider-streaming-media-handler ((_ llm-ollama) receiver _)
   (cons 'application/x-ndjson
diff --git a/llm-provider-utils.el b/llm-provider-utils.el
index 262f398a23..ed039b72a2 100644
--- a/llm-provider-utils.el
+++ b/llm-provider-utils.el
@@ -747,7 +747,8 @@ SUCCESS-CALLBACK.
 
 SUCCESS-CALLBACK is the callback that will be run when all functions
 complete."
-  (when (plist-get partial-result :text)
+  (when (and (plist-get partial-result :text)
+             (> (length (plist-get partial-result :text)) 0))
     (llm-provider-append-to-prompt provider prompt (plist-get partial-result :text)))
   (if-let ((tool-uses (plist-get partial-result :tool-uses)))
       ;; If we have tool uses, execute them, and on the callback, we will
diff --git a/llm.el b/llm.el
index 27aab38df5..f1c2561331 100644
--- a/llm.el
+++ b/llm.el
@@ -776,16 +776,30 @@ This should only be used for logging or debugging."
                         (pcase (llm-chat-prompt-interaction-role i)
                           ('user "User")
                           ('system "System")
-                          ('assistant "Assistant"))
-                        (let ((content (llm-chat-prompt-interaction-content i)))
-                          (if (llm-multipart-p content)
-                              (mapcar (lambda (part) (if (llm-media-p part)
-                                                         (format "[%s data, %d bytes]"
-                                                                 (llm-media-mime-type part)
-                                                                 (length (llm-media-data part)))
-                                                       part))
-                                      (llm-multipart-parts content))
-                            content))))
+                          ('assistant "Assistant")
+                          ('tool-results "Tool Results")
+                          (_ (llm-chat-prompt-interaction-role i)))
+                        (let* ((content-raw (llm-chat-prompt-interaction-content i))
+                               (tool-result-raw (llm-chat-prompt-interaction-tool-results i))
+                               (content (if (llm-multipart-p content-raw)
+                                            (mapcar (lambda (part) (if (llm-media-p part)
+                                                                       (format "[%s data, %d bytes]"
+                                                                               (llm-media-mime-type part)
+                                                                               (length (llm-media-data part)))
+                                                                     part))
+                                                    (llm-multipart-parts content-raw))
+                                          content-raw))
+                               (tool-results (when tool-result-raw
+                                               (mapconcat
+                                                (lambda (tr)
+                                                  (format "[Tool: %s, Result: %s]"
+                                                          (llm-chat-prompt-tool-result-tool-name tr)
+                                                          (llm-chat-prompt-tool-result-result tr)))
+                                                tool-result-raw
+                                                ", "))))
+                          (concat (if content (format "Content: %s" content) "")
+                                  (if tool-results
+                                      (format " Tool results: %s" tool-results))))))
               (llm-chat-prompt-interactions prompt) "\n")
    "\n"
    (when (llm-chat-prompt-temperature prompt)
