branch: externals/llm
commit 5623482527a3e25eb686e8df6f188fa3821ba628
Author: Roman Scherer <ro...@burningswell.com>
Commit: Roman Scherer <ro...@burningswell.com>
    Fix error handling in Gemini and Vertex providers

    The :else callback of the json-array media type doesn't contain the
    error messages.  The media type always streams the objects of the
    JSON array back via the :handlers.  In case of errors, the Vertex
    response (and, I think, the Gemini response, which I don't have
    access to) is also an array, with the elements being the error
    objects.
---
 llm-gemini.el | 20 ++++++++++----------
 llm-vertex.el | 20 ++++++++++----------
 2 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/llm-gemini.el b/llm-gemini.el
index 9915601b30..232cfeac86 100644
--- a/llm-gemini.el
+++ b/llm-gemini.el
@@ -131,12 +131,15 @@ If STREAMING-P is non-nil, use the streaming endpoint."
      (llm-gemini--chat-url provider t)
      :data (llm-gemini--chat-request prompt)
      :on-element (lambda (element)
-                   (when-let ((response (llm-vertex--get-chat-response element)))
-                     (if (stringp response)
-                         (when (> (length response) 0)
-                           (setq streamed-text (concat streamed-text response))
-                           (llm-request-callback-in-buffer buf partial-callback streamed-text))
-                       (setq function-call response))))
+                   (if (alist-get 'error element)
+                       (llm-request-callback-in-buffer buf error-callback 'error
+                                                       (llm-vertex--error-message element))
+                     (when-let ((response (llm-vertex--get-chat-response element)))
+                       (if (stringp response)
+                           (when (> (length response) 0)
+                             (setq streamed-text (concat streamed-text response))
+                             (llm-request-callback-in-buffer buf partial-callback streamed-text))
+                         (setq function-call response)))))
      :on-success (lambda (data)
                    (llm-request-callback-in-buffer
                     buf response-callback
@@ -144,10 +147,7 @@ If STREAMING-P is non-nil, use the streaming endpoint."
                     provider prompt
                     (or function-call
                         (if (> (length streamed-text) 0)
                             streamed-text
-                          (llm-vertex--get-chat-response data))))))
-     :on-error (lambda (_ data)
-                 (llm-request-callback-in-buffer buf error-callback 'error
-                                                 (llm-vertex--error-message data))))))
+                          (llm-vertex--get-chat-response data)))))))))
 
 (defun llm-gemini--count-token-url (provider)
   "Return the URL for the count token call, using PROVIDER."
diff --git a/llm-vertex.el b/llm-vertex.el
index ec16a2462d..890c998f19 100644
--- a/llm-vertex.el
+++ b/llm-vertex.el
@@ -322,12 +322,15 @@ If STREAMING is non-nil, use the URL for the streaming API."
      :headers `(("Authorization" . ,(format "Bearer %s" (llm-vertex-key provider))))
      :data (llm-vertex--chat-request prompt)
      :on-element (lambda (element)
-                   (when-let ((response (llm-vertex--get-chat-response element)))
-                     (if (stringp response)
-                         (when (> (length response) 0)
-                           (setq streamed-text (concat streamed-text response))
-                           (llm-request-callback-in-buffer buf partial-callback streamed-text))
-                       (setq function-call response))))
+                   (if (alist-get 'error element)
+                       (llm-request-callback-in-buffer buf error-callback 'error
+                                                       (llm-vertex--error-message element))
+                     (when-let ((response (llm-vertex--get-chat-response element)))
+                       (if (stringp response)
+                           (when (> (length response) 0)
+                             (setq streamed-text (concat streamed-text response))
+                             (llm-request-callback-in-buffer buf partial-callback streamed-text))
+                         (setq function-call response)))))
      :on-success (lambda (data)
                    (llm-request-callback-in-buffer
                     buf response-callback
@@ -335,10 +338,7 @@ If STREAMING is non-nil, use the URL for the streaming API."
                     provider prompt
                     (or function-call
                         (if (> (length streamed-text) 0)
                             streamed-text
-                          (llm-vertex--get-chat-response data))))))
-     :on-error (lambda (_ data)
-                 (llm-request-callback-in-buffer buf error-callback 'error
-                                                 (llm-vertex--error-message data))))))
+                          (llm-vertex--get-chat-response data)))))))))
 
 ;; Token counts
 ;; https://cloud.google.com/vertex-ai/docs/generative-ai/get-token-count
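
A minimal sketch of the detection logic the patch moves into :on-element. The
shapes of `my-sample-chunk' and `my-sample-error', and the helper name, are
hypothetical (the error payload is modeled on Google's documented error
format); the real code delegates formatting to llm-vertex--error-message.

(require 'subr-x)                       ; for when-let

;; Hypothetical parsed element of a normal streaming response chunk.
(defvar my-sample-chunk
  '((candidates . [((content . ((parts . [((text . "Hello"))]))))])))

;; Hypothetical parsed element of an error response; the code/message/status
;; fields are an assumption, not taken from llm-vertex.el.
(defvar my-sample-error
  '((error . ((code . 400)
              (message . "Invalid request")
              (status . "INVALID_ARGUMENT")))))

(defun my-element-error-string (element)
  "Return a short error string if ELEMENT carries an `error' object, else nil.
Illustrative stand-in for what llm-vertex--error-message does in the patch."
  (when-let ((err (alist-get 'error element)))
    (format "%s: %s"
            (alist-get 'status err "unknown")
            (alist-get 'message err "no message"))))

;; (my-element-error-string my-sample-error)  ;=> "INVALID_ARGUMENT: Invalid request"
;; (my-element-error-string my-sample-chunk)  ;=> nil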