branch: externals/llm
commit 48ae59d14977aae60c6f2405fc9d8bbcf2182a3f
Author: Andrew Hyatt <[email protected]>
Commit: Andrew Hyatt <[email protected]>
Fix llm-chat-prompt-to-text, which was unusable
---
llm.el | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/llm.el b/llm.el
index 29e907a093..f83233eaaf 100644
--- a/llm.el
+++ b/llm.el
@@ -142,10 +142,10 @@ ways."
(defun llm-chat-prompt-to-text (prompt)
"Convert PROMPT `llm-chat-prompt' to a simple text.
This should only be used for logging or debugging."
- (format "Context: %s\nExamples: %s\nInteractions: %s\nTemperature: %f\nMax tokens: %d\n"
+ (format "Context: %s\nExamples: %s\nInteractions: %s\n%s%s\n"
(llm-chat-prompt-context prompt)
(mapconcat (lambda (e) (format "User: %s\nResponse: %s" (car e) (cdr e)))
- (llm-chat-prompt-interactions prompt) "\n")
+ (llm-chat-prompt-examples prompt) "\n")
(mapconcat (lambda (i)
(format "%s: %s"
(pcase (llm-chat-prompt-interaction-role i)
@@ -154,8 +154,12 @@ This should only be used for logging or debugging."
('assistant "Assistant"))
(llm-chat-prompt-interaction-content i)))
(llm-chat-prompt-interactions prompt) "\n")
- (llm-chat-prompt-temperature prompt)
- (llm-chat-prompt-max-tokens prompt)))
+ (if (llm-chat-prompt-temperature prompt)
+ (format "Temperature: %s\n" (llm-chat-prompt-temperature prompt))
+ "")
+ (if (llm-chat-prompt-max-tokens prompt)
+ (format "Max tokens: %s\n" (llm-chat-prompt-max-tokens prompt))
+ "")))
(provide 'llm)