branch: externals/ellama
commit 9bd2641d8cea5c33f20f0114598403b55821d0d0
Author: Sergey Kostyaev <sskosty...@gmail.com>
Commit: Sergey Kostyaev <sskosty...@gmail.com>

    Update docs
---
 README.org | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/README.org b/README.org
index f559b0d9fd..5d308e8691 100644
--- a/README.org
+++ b/README.org
@@ -43,11 +43,12 @@ In that case you should customize ellama configuration like this:
     ;; could be llm-openai for example
     (require 'llm-ollama)
     (setopt ellama-provider
-                   (make-llm-ollama
-                    ;; this model should be pulled to use it
-                    ;; value should be the same as you print in terminal during pull
-                    :chat-model "mistral:7b-instruct-v0.2-q6_K"
-                    :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+         (make-llm-ollama
+          ;; this model should be pulled to use it
+          ;; value should be the same as you print in terminal during pull
+          :chat-model "llama3:8b-instruct-q8_0"
+          :embedding-model "nomic-embed-text"
+          :default-chat-non-standard-params '(("num_ctx" . 8192))))
     ;; Predefined llm providers for interactive switching.
     ;; You shouldn't add ollama providers here - it can be selected interactively
     ;; without it. It is just example.
@@ -63,14 +64,15 @@ In that case you should customize ellama configuration like this:
                                    :embedding-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"))))
     ;; Naming new sessions with llm
     (setopt ellama-naming-provider
-           (make-llm-ollama
-            :chat-model "mistral:7b-instruct-v0.2-q6_K"
-            :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+         (make-llm-ollama
+          :chat-model "llama3:8b-instruct-q8_0"
+          :embedding-model "nomic-embed-text"
+          :default-chat-non-standard-params '(("stop" . ("\n")))))
     (setopt ellama-naming-scheme 'ellama-generate-name-by-llm)
     ;; Translation llm provider
     (setopt ellama-translation-provider (make-llm-ollama
-                                        :chat-model "sskostyaev/openchat:8k"
-                                        :embedding-model "nomic-embed-text")))
+                                      :chat-model "phi3:14b-medium-128k-instruct-q6_K"
+                                      :embedding-model "nomic-embed-text")))
 #+END_SRC
 
 ** Commands

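For reference, here is a minimal sketch of the updated default-provider setup distilled from the patched README, assuming the llama3:8b-instruct-q8_0 and nomic-embed-text models have already been pulled with ollama:

#+BEGIN_SRC emacs-lisp
  ;; Minimal sketch based on the updated README example; it assumes
  ;; `ollama pull llama3:8b-instruct-q8_0' and
  ;; `ollama pull nomic-embed-text' have been run beforehand.
  (require 'llm-ollama)
  (setopt ellama-provider
          (make-llm-ollama
           :chat-model "llama3:8b-instruct-q8_0"
           :embedding-model "nomic-embed-text"
           ;; "num_ctx" raises ollama's context window above its small default
           :default-chat-non-standard-params '(("num_ctx" . 8192))))
#+END_SRC

Similarly, the "stop" parameter added to the naming provider cuts generation at the first newline, presumably so that generated session names stay on a single line.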