branch: externals/llm
commit 609c8a683446f348f7b65d282f420a27550bd4a5
Author: Andrew Hyatt <ahy...@gmail.com>
Commit: Andrew Hyatt <ahy...@gmail.com>
Fix error message handling

The first argument to error callbacks should always be a symbol.  The second
could be a string in many cases, but often it is parsed JSON which needs the
error message extracted.
---
 llm-ollama.el      | 25 +++++++++++++------------
 llm-openai.el      | 13 ++++---------
 llm-request-plz.el | 21 ++++++++------------
 3 files changed, 25 insertions(+), 34 deletions(-)

diff --git a/llm-ollama.el b/llm-ollama.el
index e6dd01b305..f3d7ff4d72 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -82,6 +82,10 @@ PROVIDER is the llm-ollama provider."
   "Return the embedding from the server RESPONSE."
   (assoc-default 'embedding response))
 
+(defun llm-ollama--error-message (data)
+  "Return the error message from DATA."
+  (if (stringp data) data (assoc-default 'error data)))
+
 (cl-defmethod llm-embedding-async ((provider llm-ollama) string vector-callback error-callback)
   (let ((buf (current-buffer)))
     (llm-request-plz-async (llm-ollama--url provider "embeddings")
@@ -89,11 +93,10 @@ PROVIDER is the llm-ollama provider."
                            :on-success (lambda (data)
                                          (llm-request-callback-in-buffer
                                           buf vector-callback (llm-ollama--embedding-extract-response data)))
-                           :on-error (lambda (_ _)
-                                       ;; The problem with ollama is that it doesn't
-                                       ;; seem to have an error response.
+                           :on-error (lambda (type err)
                                        (llm-request-callback-in-buffer
-                                        buf error-callback 'error "Unknown error calling ollama")))))
+                                        buf error-callback type
+                                        (llm-ollama--error-message err))))))
 
 (cl-defmethod llm-embedding ((provider llm-ollama) string)
   (llm-ollama--embedding-extract-response
@@ -153,10 +156,9 @@ STREAMING is a boolean to control whether to stream the response."
                            (llm-provider-utils-append-to-prompt prompt response)
                            (llm-request-plz-callback-in-buffer buf response-callback response)))
             :on-error (lambda (code data)
-                        (let ((error-message (cdr (assoc 'error data))))
-                          (llm-request-plz-callback-in-buffer buf error-callback 'error
-                                                              (format "Problem calling Ollama: %s message: %s"
-                                                                      code error-message)))))))
+                        (llm-request-plz-callback-in-buffer
+                         buf error-callback 'error
+                         (llm-ollama--error-message data))))))
 
 (cl-defmethod llm-chat-streaming ((provider llm-ollama) prompt partial-callback response-callback error-callback)
   (let ((buf (current-buffer))
@@ -173,10 +175,9 @@ STREAMING is a boolean to control whether to stream the response."
                              (when-let ((response (llm-ollama--get-response data)))
                                (setq response-text (concat response-text response))
                                (llm-request-callback-in-buffer buf partial-callback response-text)))
-                :on-error (lambda (_ _)
-                            ;; The problem with ollama is that it doesn't
-                            ;; seem to have an error response.
-                            (llm-request-callback-in-buffer buf error-callback 'error "Unknown error calling ollama")))))
+                :on-error (lambda (type msg)
+                            (llm-request-callback-in-buffer buf error-callback type
+                                                            (llm-ollama--error-message msg))))))
 
 (cl-defmethod llm-name ((provider llm-ollama))
   (llm-ollama-chat-model provider))

diff --git a/llm-openai.el b/llm-openai.el
index 610c6eba75..7ee30a24b7 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -241,9 +241,8 @@ PROMPT is the prompt that needs to be updated with the response."
                             (llm-openai--process-and-return
                              provider prompt data error-callback)))
               :on-error (lambda (_ data)
-                          (let ((errdata (cdr (assoc 'error data))))
-                            (llm-request-plz-callback-in-buffer buf error-callback 'error
-                                                                (llm-openai--error-message data)))))))
+                          (llm-request-plz-callback-in-buffer buf error-callback 'error
+                                                              (llm-openai--error-message data))))))
 
 (cl-defmethod llm-chat ((provider llm-openai) prompt)
   (llm-openai--check-key provider)
@@ -317,13 +316,9 @@ RESPONSE can be nil if the response is complete."
                             (llm-request-plz-callback-in-buffer
                              buf error-callback 'error data))))
               :on-error (lambda (_ data)
-                          (let ((errdata
-                                 (cdr (assoc 'error data))))
-                            (llm-request-plz-callback-in-buffer
+                          (llm-request-plz-callback-in-buffer
                            buf error-callback 'error
-                            (format "Problem calling Open AI: %s message: %s"
-                                    (cdr (assoc 'type errdata))
-                                    (cdr (assoc 'message errdata))))))
+                           (llm-openai--error-message data)))
               :on-success (lambda (_)
                             (llm-request-plz-callback-in-buffer
                              buf
diff --git a/llm-request-plz.el b/llm-request-plz.el
index fa7a9e5284..8d7b9a94cd 100644
--- a/llm-request-plz.el
+++ b/llm-request-plz.el
@@ -94,28 +94,23 @@ TIMEOUT is the number of seconds to wait for a response."
     :timeout timeout))
 
 (defun llm-request-plz--handle-error (error on-error)
-  "Handle the ERROR with the ON-ERROR callback.
-
-For HTTP errors, ON-ERROR will be called with the HTTP status
-code and the HTTP body of the error response.
-
-For Curl errors, ON-ERROR will be called with the exit code of
-the curl process and an error message."
+  "Handle the ERROR with the ON-ERROR callback."
   (cond ((plz-media-type-filter-error-p error)
          (let ((cause (plz-media-type-filter-error-cause error))
                (response (plz-error-response error)))
-           ;; TODO: What do we want to pass to callers here?
-           (funcall on-error 'filter-error cause)))
+           (funcall on-error 'error
+                    (format "Error with cause: %s, response: %s" cause response))))
        ((plz-error-curl-error error)
         (let ((curl-error (plz-error-curl-error error)))
-          (funcall on-error
-                   (car curl-error)
-                   (cdr curl-error))))
+          (funcall on-error 'error
+                   (format "curl error code %d: %s"
+                           (car curl-error)
+                           (cdr curl-error)))))
        ((plz-error-response error)
         (when-let ((response (plz-error-response error))
                    (status (plz-response-status response))
                    (body (plz-response-body response)))
-          (funcall on-error status body)))
+          (funcall on-error 'error body)))
        (t (user-error "Unexpected error: %s" error))))
 
 (cl-defun llm-request-plz-async (url &key headers data on-success media-type
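
As a rough illustration of the callback convention described in the commit
message (not part of the patch): error callbacks now always get a symbol as
the first argument and a human-readable message as the second, with the new
llm-ollama--error-message helper normalizing both plain-string and
parsed-JSON error payloads.  The callback name below is made up for the
example; only the helper comes from the diff above.

;; Sketch only, not part of the patch: the helper from llm-ollama.el,
;; exercised with the two payload shapes the commit message mentions.
(defun llm-ollama--error-message (data)
  "Return the error message from DATA."
  (if (stringp data) data (assoc-default 'error data)))

;; A made-up error callback following the convention: symbol first, string second.
(defun my-error-callback (type msg)
  (message "llm error (%s): %s" type msg))

(my-error-callback 'error (llm-ollama--error-message "connection refused"))
;; => "llm error (error): connection refused"
(my-error-callback 'error (llm-ollama--error-message '((error . "model not found"))))
;; => "llm error (error): model not found"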