branch: elpa/gptel
commit 10752576e4487652d1aea642a75fed5c549169b1
Author: Karthik Chikmagalur <karthikchikmaga...@gmail.com>
Commit: Karthik Chikmagalur <karthikchikmaga...@gmail.com>

    gptel: Move system-message to gptel--request-data
    
    * gptel-openai.el (gptel--request-data, gptel--parse-buffer): Add
    the system message when the full request payload is constructed in
    `gptel--request-data', and not when parsing the buffer in
    `gptel--parse-buffer'.  This makes it much easier to handle
    templated system messages, to be added next.
    
    * gptel-ollama.el (gptel--request-data, gptel--parse-buffer):
    Ditto.
    
    * gptel-gemini.el (gptel--request-data, gptel--parse-buffer):
    Ditto.
---
 gptel-gemini.el | 17 ++++++++---------
 gptel-ollama.el | 12 ++++++------
 gptel-openai.el | 12 ++++++------
 3 files changed, 20 insertions(+), 21 deletions(-)
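
A minimal sketch of the pattern the OpenAI and Ollama backends now share:
`gptel--request-data' pushes the system message onto the prompt list itself,
just before building the payload plist.  The model name, user text and system
message below are placeholder assumptions, and the snippet assumes gptel is
already loaded.

(require 'gptel)  ; for gptel--system-message, gptel--model-name, etc.

;; Sketch only: PROMPTS as `gptel--parse-buffer' now returns them, with no
;; system message attached (placeholder user text).
(let ((prompts (list (list :role "user" :content "Hello"))))
  ;; `gptel--request-data' prepends the system message itself...
  (when (and gptel--system-message
             (not (gptel--model-capable-p 'nosystem)))
    (push (list :role "system" :content gptel--system-message) prompts))
  ;; ...and then builds the payload with it already in place.
  `(:model ,(gptel--model-name gptel-model)
    :messages [,@prompts]))
;; => (:model "gpt-4o-mini"            ; placeholder model name
;;     :messages [(:role "system" :content "You are a helpful assistant.")
;;                (:role "user" :content "Hello")])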

diff --git a/gptel-gemini.el b/gptel-gemini.el
index 6c78dca129..2954a1cfc3 100644
--- a/gptel-gemini.el
+++ b/gptel-gemini.el
@@ -61,6 +61,14 @@
 
 (cl-defmethod gptel--request-data ((_backend gptel-gemini) prompts)
   "JSON encode PROMPTS for sending to Gemini."
+  ;; HACK (backwards compatibility) Prepend the system message to the first user
+  ;; prompt, but only for gemini-pro.
+  (when (and (equal gptel-model 'gemini-pro) gptel--system-message)
+    (cl-callf
+        (lambda (msg)
+          (vconcat `((:text ,(concat gptel--system-message "\n\n"))) msg))
+        (thread-first (car prompts)
+                      (plist-get :parts))))
   (let ((prompts-plist
          `(:contents [,@prompts]
            :safetySettings [(:category "HARM_CATEGORY_HARASSMENT"
@@ -131,15 +139,6 @@
                   :parts
                  `[(:text ,(string-trim (buffer-substring-no-properties (point-min) (point-max))))])
             prompts))
-    ;; HACK Prepend the system message to the first user prompt, but only for
-    ;; this model.
-    (when (and (equal gptel-model 'gemini-pro)
-               gptel--system-message)
-      (cl-callf
-          (lambda (msg)
-            (vconcat `((:text ,(concat gptel--system-message "\n\n"))) msg))
-          (thread-first (car prompts)
-                        (plist-get :parts))))
     prompts))
 
 (defun gptel--gemini-parse-multipart (parts)
diff --git a/gptel-ollama.el b/gptel-ollama.el
index 1bdc266c90..d37d02f6d2 100644
--- a/gptel-ollama.el
+++ b/gptel-ollama.el
@@ -75,6 +75,11 @@ Intended for internal use only.")
 
 (cl-defmethod gptel--request-data ((_backend gptel-ollama) prompts)
   "JSON encode PROMPTS for sending to ChatGPT."
+  (when (and gptel--system-message
+             (not (gptel--model-capable-p 'nosystem)))
+    (push (list :role "system"
+                :content gptel--system-message)
+          prompts))
   (let ((prompts-plist
          `(:model ,(gptel--model-name gptel-model)
            :messages [,@prompts]
@@ -135,12 +140,7 @@ Intended for internal use only.")
                   :content
                  (string-trim (buffer-substring-no-properties (point-min) (point-max))))
             prompts))
-    (if (and (not (gptel--model-capable-p 'nosystem))
-             gptel--system-message)
-        (cons (list :role "system"
-                    :content gptel--system-message)
-              prompts)
-      prompts)))
+    prompts))
 
 (defun gptel--ollama-parse-multipart (parts)
   "Convert a multipart prompt PARTS to the Ollama API format.
diff --git a/gptel-openai.el b/gptel-openai.el
index e33f17c3ff..9f19a0e310 100644
--- a/gptel-openai.el
+++ b/gptel-openai.el
@@ -138,6 +138,11 @@ with differing settings.")
 
 (cl-defmethod gptel--request-data ((_backend gptel-openai) prompts)
   "JSON encode PROMPTS for sending to ChatGPT."
+  (when (and gptel--system-message
+             (not (gptel--model-capable-p 'nosystem)))
+    (push (list :role "system"
+                :content gptel--system-message)
+          prompts))
   (let ((prompts-plist
          `(:model ,(gptel--model-name gptel-model)
            :messages [,@prompts]
@@ -195,12 +200,7 @@ with differing settings.")
                   :content
                  (gptel--trim-prefixes (buffer-substring-no-properties (point-min) (point-max))))
             prompts))
-    (if (and (not (gptel--model-capable-p 'nosystem))
-             gptel--system-message)
-        (cons (list :role "system"
-                    :content gptel--system-message)
-              prompts)
-      prompts)))
+    prompts))
 
 ;; TODO This could be a generic function
 (defun gptel--openai-parse-multipart (parts)
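
The gemini-pro special case (gptel-gemini.el above) keeps its old behaviour in
the new location: that model has no system role, so the system message is
folded into the first user prompt's :parts vector, only now inside
`gptel--request-data' instead of `gptel--parse-buffer'.  A rough sketch of
that rewrite, with placeholder strings standing in for real buffer content:

(require 'cl-lib)  ; for cl-callf

;; Sketch only: the first prompt as parsed from the buffer...
(let ((gptel--system-message "You are a helpful assistant.")  ; placeholder
      (prompt (list :role "user" :parts [(:text "Hello")])))
  ;; ...gets the system text prepended to its :parts, gemini-pro only.
  (cl-callf (lambda (parts)
              (vconcat `((:text ,(concat gptel--system-message "\n\n"))) parts))
      (plist-get prompt :parts))
  prompt)
;; => (:role "user"
;;     :parts [(:text "You are a helpful assistant.\n\n") (:text "Hello")])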
