branch: externals/llm
commit bfb393b31ea98cbe5ebc25592b3cceab46764861
Author: Andrew Hyatt <ahy...@gmail.com>
Commit: Andrew Hyatt <ahy...@gmail.com>

    Allow context and examples to be set after the first interaction
    
    They will always be set on the first interaction, though.  This allows
    context and examples to be used with multi-message prompts created by
    llm-make-chat-prompt.
    
    This is needed to fix https://github.com/ahyatt/llm/issues/43.
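
    As an illustration only, a rough sketch of the call this enables.  It
    assumes llm-make-chat-prompt accepts the conversation as a list of
    strings plus :context and :examples keyword arguments; the conversation
    text itself is made up:

        ;; Assumed call form: context and examples supplied together with an
        ;; ongoing, multi-message conversation -- previously they only took
        ;; effect when the prompt held a single user message.
        (llm-make-chat-prompt
         '("What is the capital of France?"
           "The capital of France is Paris."
           "And of Germany?")
         :context "Answer geography questions in one short sentence."
         :examples '(("What is the capital of Italy?" . "Rome.")))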
---
 llm-provider-utils-test.el | 32 +++++---------------------------
 llm-provider-utils.el      | 17 ++++++++---------
 2 files changed, 13 insertions(+), 36 deletions(-)

diff --git a/llm-provider-utils-test.el b/llm-provider-utils-test.el
index 0b479bb369..e89354803f 100644
--- a/llm-provider-utils-test.el
+++ b/llm-provider-utils-test.el
@@ -25,7 +25,6 @@
 
 (ert-deftest llm-provider-utils-combine-to-system-prompt ()
   (let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
-         (interaction2 (make-llm-chat-prompt-interaction :role 'assistant :content "Hi! How can I assist you?"))
          (example1 (cons "Request 1" "Response 1"))
          (example2 (cons "Request 2" "Response 2"))
          (prompt-for-first-request
@@ -33,12 +32,6 @@
            :context "Example context"
            :interactions (list (copy-llm-chat-prompt-interaction interaction1))
            :examples (list example1 example2)))
-         (prompt-for-second-request
-          (make-llm-chat-prompt
-           :context "An example context"
-           :interactions (list (copy-llm-chat-prompt-interaction interaction1)
-                               (copy-llm-chat-prompt-interaction interaction2))
-           :examples (list example1 example2)))
          (prompt-with-existing-system-prompt
           (make-llm-chat-prompt
            :context "Example context"
@@ -51,12 +44,8 @@
     (should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2"
                    (llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))
     (should (equal "Hello" (llm-chat-prompt-interaction-content (nth 1 (llm-chat-prompt-interactions prompt-for-first-request)))))
-
-    ;; Nothing should be done on the second request.
-    (should (= 2 (length (llm-chat-prompt-interactions prompt-for-second-request))))
-    (llm-provider-utils-combine-to-system-prompt prompt-for-second-request)
-    (should (equal interaction1 (nth 0 (llm-chat-prompt-interactions prompt-for-second-request))))
-    (should (equal interaction2 (nth 1 (llm-chat-prompt-interactions prompt-for-second-request))))
+    (should-not (llm-chat-prompt-context prompt-for-first-request))
+    (should-not (llm-chat-prompt-examples prompt-for-first-request))
 
     ;; On the request with the existing system prompt, it should append the new
     ;; text to the existing system prompt.
@@ -67,31 +56,20 @@
 
 (ert-deftest llm-provider-utils-combine-to-user-prompt ()
   (let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
-         (interaction2 (make-llm-chat-prompt-interaction :role 'assistant :content "Hi! How can I assist you?"))
          (example1 (cons "Request 1" "Response 1"))
          (example2 (cons "Request 2" "Response 2"))
          (prompt-for-first-request
           (make-llm-chat-prompt
            :context "Example context"
            :interactions (list (copy-llm-chat-prompt-interaction interaction1))
-           :examples (list example1 example2)))
-         (prompt-for-second-request
-          (make-llm-chat-prompt
-           :context "An example context"
-           :interactions (list (copy-llm-chat-prompt-interaction interaction1)
-                               (copy-llm-chat-prompt-interaction interaction2))
            :examples (list example1 example2))))
     ;; In the first request, the system prompt should be prepended to the user request.
     (llm-provider-utils-combine-to-user-prompt prompt-for-first-request)
     (should (= 1 (length (llm-chat-prompt-interactions prompt-for-first-request))))
+    (should-not (llm-chat-prompt-context prompt-for-first-request))
+    (should-not (llm-chat-prompt-examples prompt-for-first-request))
     (should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2\nHello"
-                   (llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))
-
-    ;; Nothing should be done on the second request.
-    (should (= 2 (length (llm-chat-prompt-interactions prompt-for-second-request))))
-    (llm-provider-utils-combine-to-user-prompt prompt-for-second-request)
-    (should (equal interaction1 (nth 0 (llm-chat-prompt-interactions prompt-for-second-request))))
-    (should (equal interaction2 (nth 1 (llm-chat-prompt-interactions prompt-for-second-request))))))
+                   (llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))))
 
 (ert-deftest llm-provider-utils-collapse-history ()
   (let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
diff --git a/llm-provider-utils.el b/llm-provider-utils.el
index dc57081134..ed1fea3e41 100644
--- a/llm-provider-utils.el
+++ b/llm-provider-utils.el
@@ -290,11 +290,7 @@ This should be used for providers that have a notion of a system prompt.
 If there is a system prompt, and no assistant response, add to it.
 If there is no system prompt, create one.
 If there is an assistance response, do nothing."
-  (unless (seq-some
-           (lambda (interaction)
-             (eq (llm-chat-prompt-interaction-role interaction) 'assistant))
-           (llm-chat-prompt-interactions prompt))
-    (let ((system-prompt (seq-find
+  (let ((system-prompt (seq-find
                           (lambda (interaction)
                             (eq (llm-chat-prompt-interaction-role interaction) 'system))
                           (llm-chat-prompt-interactions prompt)))
@@ -308,17 +304,20 @@ If there is an assistance response, do nothing."
           (push (make-llm-chat-prompt-interaction
                  :role 'system
                  :content system-content)
-                (llm-chat-prompt-interactions prompt)))))))
+                (llm-chat-prompt-interactions prompt))
+          (setf (llm-chat-prompt-context prompt) nil
+                (llm-chat-prompt-examples prompt) nil)))))
 
 (defun llm-provider-utils-combine-to-user-prompt (prompt &optional example-prelude)
   "Add context and examples to a user prompt in PROMPT.
 This should be used for providers that do not have a notion of a system prompt."
-  (when (= (length (llm-chat-prompt-interactions prompt)) 1)
-    (when-let ((system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
+  (when-let ((system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
       (setf (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt)))
             (concat system-content
                     "\n"
-                    (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt))))))
+                    (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt))))
+            (llm-chat-prompt-context prompt) nil
+            (llm-chat-prompt-examples prompt) nil)))
 
 (defun llm-provider-utils-collapse-history (prompt &optional history-prelude)
   "Collapse history to a single prompt.