branch: externals/llm
commit 669456cba534550ac8dc87ac4aff4849721d159f
Author: Roman Scherer <ro...@burningswell.com>
Commit: Roman Scherer <ro...@burningswell.com>

    Fix Ollama error handling and appending to prompt
    
    Instead of appending only the response message to the prompt, the
    whole response object was appended. A subsequent request then failed
    with a "json: cannot unmarshal object ..." error.
    
    The Ollama error handling also assumed an error response in the
    OpenAI format and raised an error while trying to extract the error
    type from an alist, when the error field is actually a plain string.
---
 llm-ollama.el | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/llm-ollama.el b/llm-ollama.el
index b8409f5632..0a80e9955f 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -149,15 +149,14 @@ STREAMING is a boolean to control whether to stream the 
response."
      :data (llm-ollama--chat-request provider prompt nil)
      :timeout llm-ollama-chat-timeout
      :on-success (lambda (data)
-                   (let ((output (llm-ollama--get-response data)))
-                     (llm-provider-utils-append-to-prompt prompt data)
-                     (llm-request-plz-callback-in-buffer buf response-callback 
output)))
-     :on-error (lambda (_ data)
-                 (let ((errdata (cdr (assoc 'error data))))
+                   (let ((response (llm-ollama--get-response data)))
+                     (llm-provider-utils-append-to-prompt prompt response)
+                     (llm-request-plz-callback-in-buffer buf response-callback 
response)))
+     :on-error (lambda (code data)
+                 (let ((error-message (cdr (assoc 'error data))))
                    (llm-request-plz-callback-in-buffer buf error-callback 
'error
                                                        (format "Problem 
calling Ollama: %s message: %s"
-                                                               (cdr (assoc 
'type errdata))
-                                                               (cdr (assoc 
'message errdata)))))))))
+                                                               code 
error-message)))))))
 
 (cl-defmethod llm-chat-streaming ((provider llm-ollama) prompt 
partial-callback response-callback error-callback)
   (let ((buf (current-buffer))
@@ -177,7 +176,7 @@ STREAMING is a boolean to control whether to stream the 
response."
       :on-error (lambda (_ _)
                   ;; The problem with ollama is that it doesn't
                   ;; seem to have an error response.
-                  (llm-request-callback-in-buffer buf error-callback "Unknown 
error calling ollama")))))
+                  (llm-request-callback-in-buffer buf error-callback 'error 
"Unknown error calling ollama")))))
 
 (cl-defmethod llm-name ((provider llm-ollama))
   (llm-ollama-chat-model provider))

Reply via email to