branch: externals/llm
commit 0f1577f9e85a14ac4836bf50189fd0975e887a4d
Author: Andrew Hyatt <ahy...@gmail.com>
Commit: Andrew Hyatt <ahy...@gmail.com>

    Revert "Enable Open AI compatible providers to customize error extraction"
    
    This reverts commit 3953df35d5aacce63efd0ebb6ee360c31dcc9017.
---
 NEWS.org      |  4 +---
 llm-openai.el | 30 +++++++++++++-----------------
 2 files changed, 14 insertions(+), 20 deletions(-)

diff --git a/NEWS.org b/NEWS.org
index 51fe77d2ca..f31863f492 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,8 +1,6 @@
-* Verson 0.12.4
+* Verson 0.12.3
 - Refactor of warn-non-nonfree methods.
 - Add non-free warnings for Gemini and Claude.
-* Version 0.12.3
-- Enable customization of error message handling for Open AI compatible providers, via =llm-openai--error-message=.
 * Version 0.12.2
 - Send connection issues to error callbacks, and fix an error handling issue in Ollama.
 - Fix issue where, in some cases, streaming does not work the first time attempted.
diff --git a/llm-openai.el b/llm-openai.el
index 1dda155a01..01e1b239cb 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -74,10 +74,19 @@ MODEL is the embedding model to use, or nil to use the default.."
   "Return the embedding from the server RESPONSE."
   (cdr (assoc 'embedding (aref (cdr (assoc 'data response)) 0))))
 
+(defun llm-openai--error-message (err-response)
+  "Return a user-visible error message from ERR-RESPONSE."
+  (if (stringp err-response)
+      err-response
+    (let ((errdata (cdr (assoc 'error err-response))))
+      (format "Open AI returned error: %s message: %s"
+              (cdr (assoc 'type errdata))
+              (cdr (assoc 'message errdata))))))
+
 (defun llm-openai--handle-response (response extractor)
   "If RESPONSE is an error, throw it, else call EXTRACTOR."
-  (if-let ((err-msg (llm-openai--error-message response)))
-      (error err-msg)
+  (if (cdr (assoc 'error response))
+      (error (llm-openai--error-message response))
     (funcall extractor response)))
 
 (cl-defmethod llm-openai--check-key ((provider llm-openai))
@@ -107,19 +116,6 @@ MODEL is the embedding model to use, or nil to use the default.."
           (unless (string-suffix-p "/" (llm-openai-compatible-url provider))
             "/") command))
 
-(cl-defgeneric llm-openai--error-message (provider err-response)
-  "Return a user-visible error message from ERR-RESPONSE.
-If ERR-RESPONSE is not an error, return nil.")
-
-(cl-defmethod llm-openai--error-message ((_ llm-openai) err-response)
-  (if (stringp err-response)
-      err-response
-    (let ((errdata (assoc-default 'error err-response)))
-      (when errdata
-        (format "Open AI returned error: %s message: %s"
-                (cdr (assoc 'type errdata))
-                (cdr (assoc 'message errdata)))))))
-
 (cl-defmethod llm-embedding-async ((provider llm-openai) string vector-callback error-callback)
   (llm-openai--check-key provider)  
   (let ((buf (current-buffer)))
@@ -218,10 +214,10 @@ This function adds the response to the prompt, executes any
 functions, and returns the value that the client should get back.
 
 PROMPT is the prompt that needs to be updated with the response."
-  (if-let ((err-msg (llm-openai--error-message provider response)))
+  (if (and (consp response) (cdr (assoc 'error response)))
       (progn
         (when error-callback
-          (funcall error-callback 'error err-msg))
+          (funcall error-callback 'error (llm-openai--error-message response)))
         response)
     ;; When it isn't an error
     (llm-provider-utils-process-result

Reply via email to