branch: elpa/aidermacs
commit a99b6c0998c7520472c6af69ed639e1463e062bd
Merge: 34fecf9703 c3ec966f0a
Author: Matthew Zeng <matthew...@posteo.net>
Commit: GitHub <nore...@github.com>
    Merge pull request #11 from ArthurHeymans/DynamicallyModelFetch

    Dynamically fetch models from providers
---
 README.org          |  64 ++++++++++++++++++++++--------
 aidermacs-models.el | 112 +++++++++++++++++++++++++++++++++++++++++++++++++---
 aidermacs.el        |  13 +++---
 3 files changed, 161 insertions(+), 28 deletions(-)

diff --git a/README.org b/README.org
index c33d8aa1bb..9bb1ac49c6 100644
--- a/README.org
+++ b/README.org
@@ -31,35 +31,46 @@ While =aider.el= strictly mirrors Aider's CLI behavior, =Aidermacs= is built aro
 
 With =Aidermacs=, you get:
 
-1. Better Support for Multiline Input
-   - =aider= is primarily designed as a command-line program, where multiline input is restricted by terminal limitations.
-   - Terminal-based tools require special syntax or manual formatting to handle multiline input, which can be cumbersome and unintuitive.
-   - =Aidermacs= eliminates these restrictions by handling multiline prompts natively within Emacs, allowing you to compose complex AI requests just like any other text input.
-   - Whether you're pasting blocks of code or refining AI-generated responses, multiline interactions in =Aidermacs= feel natural and seamless.
+1. Intelligent Model Selection
+   - Automatic discovery of available models from multiple providers
+   - Real-time model compatibility checking
+   - Seamless integration with your configured API keys
+   - Caching for quick access to frequently used models
+   - Support for both popular pre-configured models and dynamically discovered ones
+
+2. Flexible Terminal Backend Support
+   - =Aidermacs= supports multiple terminal backends (comint and vterm) for better compatibility and performance
+   - Easy configuration to choose your preferred terminal emulation
+   - Extensible architecture for adding new backends
 
-2. Smarter Syntax Highlighting
+3. Smarter Syntax Highlighting
    - AI-generated code appears with proper syntax highlighting in major languages.
    - Ensures clarity and readability without additional configuration.
 
-3. Flexible Terminal Backend Support
-   - =Aidermacs= supports multiple terminal backends (comint and vterm) for better compatibility and performance
-   - Easy configuration to choose your preferred terminal emulation
-   - Extensible architecture for adding new backends
+4. Greater Configurability
+   - =Aidermacs= offers more customization options to tailor the experience to your preferences.
+   - You can easily disable automatic commits, choose to operate only within a specific directory, and customize keybindings.
+   - The model selection system provides fine-grained control over which AI models are available and how they're used.
+   - Terminal backend selection allows choosing between comint and vterm for different performance characteristics.
+   - Multiline input configuration adapts to your preferred editing style.
+
+5. Better Support for Multiline Input
+   - =aider= is primarily designed as a command-line program, where multiline input is restricted by terminal limitations.
+   - Terminal-based tools require special syntax or manual formatting to handle multiline input, which can be cumbersome and unintuitive.
+   - =Aidermacs= eliminates these restrictions by handling multiline prompts natively within Emacs, allowing you to compose complex AI requests just like any other text input.
+   - Whether you're pasting blocks of code or refining AI-generated responses, multiline interactions in =Aidermacs= feel natural and seamless.
 
-4. Enhanced File Management from Emacs
+6. Enhanced File Management from Emacs
    - List files currently in chat with =M-x aidermacs-list-added-files=
    - Drop specific files from chat with =M-x aidermacs-drop-file=
    - View output history with =M-x aidermacs-show-output-history=
    - and more
 
-6. Greater Configurability
-   - =Aidermacs= offers more customization options to tailor the experience to your preferences.
-   - You can easily disable automatic commits, choose to operate only within a specific directory, and customize keybindings.
-
-5. Community-Driven Development
+7. Community-Driven Development
    - =Aidermacs= is actively developed and maintained by the community, incorporating user feedback and contributions.
    - We prioritize features and improvements that directly benefit Emacs users, ensuring a tool that evolves with your needs.
+   ... and more to come 🚀
 
 * Installation
 
@@ -205,6 +216,27 @@ The =.aider.prompt.org= file (created with =M-x aidermacs-open-prompt-file=) is
 
 The file is automatically recognized and enables Aidermacs minor mode with the above keybindings.
 
+** Dynamic Model Selection
+
+Aidermacs provides intelligent model selection that automatically detects and integrates with multiple AI providers:
+
+- Automatically fetches available models from supported providers (OpenAI, Anthropic, DeepSeek, Google Gemini, OpenRouter)
+- Caches model lists for quick access
+- Supports both popular pre-configured models and dynamically discovered ones
+- Handles API keys and authentication automatically
+- Provides model compatibility checking
+
+To change models:
+1. Use =M-x aidermacs-change-model= or press =o= in the transient menu
+2. Select from either:
+   - Popular pre-configured models (fast)
+   - Dynamically fetched models from all supported providers (comprehensive)
+
+The system will automatically filter models to only show ones that are:
+- Supported by your current Aider version
+- Available through your configured API keys
+- Compatible with your current workflow
+
 ** Tips
 
 1. Start with Core Actions to begin a session

diff --git a/aidermacs-models.el b/aidermacs-models.el
index 464cc3bbbe..28a712137f 100644
--- a/aidermacs-models.el
+++ b/aidermacs-models.el
@@ -12,7 +12,7 @@
 (defcustom aidermacs-popular-models '("anthropic/claude-3-5-sonnet-20241022"  ;; really good in practical
                                       "o3-mini" ;; very powerful
-                                      "gemini/gemini-2.0-pro-exp-02-05" ;; free
+                                      "gemini/gemini-2.0-flash" ;; free
                                       "r1" ;; performance match o1, price << claude sonnet. weakness: small context
                                       "deepseek/deepseek-chat" ;; chatgpt-4o level performance, price is 1/100. weakness: small context
                                       )
@@ -22,17 +22,119 @@ Also based on aidermacs LLM benchmark: https://aidermacs.chat/docs/leaderboards/
   :type '(repeat string)
   :group 'aidermacs-models)
 
+(defvar aidermacs--cached-models aidermacs-popular-models
+  "Cache of available AI models.")
+
+(require 'json)
+(require 'url)
+
+(defun fetch-openai-compatible-models (url)
+  "Fetch available models from an OpenAI compatible API endpoint at URL.
+URL should be the base API endpoint, e.g. https://api.openai.com/v1.
+Returns a list of model names with appropriate prefixes based on the API provider."
+  (let* ((url-parsed (url-generic-parse-url url))
+         (hostname (url-host url-parsed))
+         (prefix (cond ((string= hostname "api.openai.com") "openai")
+                       ((string= hostname "openrouter.ai") "openrouter")
+                       ((string= hostname "api.deepseek.com") "deepseek")
+                       ((string= hostname "api.anthropic.com") "anthropic")
+                       ((string= hostname "generativelanguage.googleapis.com") "gemini")
+                       (t (error "Unknown API host: %s" hostname))))
+         (token (cond ((string= hostname "api.openai.com") (getenv "OPENAI_API_KEY"))
+                      ((string= hostname "openrouter.ai") (getenv "OPENROUTER_API_KEY"))
+                      ((string= hostname "api.deepseek.com") (getenv "DEEPSEEK_API_KEY"))
+                      ((string= hostname "api.anthropic.com") (getenv "ANTHROPIC_API_KEY"))
+                      ((string= hostname "generativelanguage.googleapis.com") (getenv "GEMINI_API_KEY"))
+                      (t (error "Unknown API host: %s" hostname)))))
+    (with-local-quit
+      (with-current-buffer
+          (let ((url-request-extra-headers
+                 (cond ((string= hostname "api.anthropic.com")
+                        `(("x-api-key" . ,token)
+                          ("anthropic-version" . "2023-06-01")))
+                       ((string= hostname "generativelanguage.googleapis.com")
+                        nil) ; No auth headers for Gemini, key is in URL
+                       (t
+                        `(("Authorization" . ,(concat "Bearer " token)))))))
+            (url-retrieve-synchronously
+             (if (string= hostname "generativelanguage.googleapis.com")
+                 (concat url "/models?key=" token)
+               (concat url "/models"))))
+        (goto-char url-http-end-of-headers)
+        (let* ((json-object-type 'alist)
+               (json-data (json-read))
+               (models (if (string= hostname "generativelanguage.googleapis.com")
+                           (alist-get 'models json-data)
+                         (alist-get 'data json-data))))
+          (mapcar (lambda (model)
+                    (concat prefix "/"
+                            (cond
+                             ((string= hostname "generativelanguage.googleapis.com")
+                              (replace-regexp-in-string "^models/" "" (alist-get 'name model)))
+                             ((stringp model) model) ; Handle case where model is just a string
+                             (t (or (alist-get 'id model)
+                                    (alist-get 'name model))))))
+                  models))))))
+
+
 (defun aidermacs--select-model ()
   "Private function for model selection with completion."
-  (completing-read "Select AI model: " aidermacs-popular-models nil t nil nil (car aidermacs-popular-models)))
+  (let ((model (with-local-quit
+                 (completing-read "Select AI model: " aidermacs--cached-models nil t))))
+    (when model
+      (aidermacs--send-command (format "/model %s" model) t))))
+
+(defun aidermacs--get-available-models ()
+  "Get list of models supported by aider using the /models command."
+  (aidermacs--send-command
+   "/models /" t
+   (lambda (output)
+     (let* ((supported-models
+             (seq-filter
+              (lambda (line)
+                (string-prefix-p "- " line))
+              (split-string output "\n" t)))
+            (models nil))
+       (setq supported-models
+             (mapcar (lambda (line)
+                       (substring line 2)) ; Remove "- " prefix
+                     supported-models))
+       (dolist (url '("https://api.openai.com/v1"
+                      "https://openrouter.ai/api/v1"
+                      "https://api.deepseek.com"
+                      "https://api.anthropic.com/v1"
+                      "https://generativelanguage.googleapis.com/v1beta"))
+         (condition-case err
+             (let* ((fetched-models (fetch-openai-compatible-models url))
+                    (filtered-models (seq-filter (lambda (model)
+                                                   (member model supported-models))
+                                                 fetched-models)))
+               ;; (message "Fetched models from %s: %S" url fetched-models)
+               ;; (message "Filtered models from %s: %S" url filtered-models)
+               (setq models (append models filtered-models)))
+           (error (message "Failed to fetch models from %s: %s" url err))))
+       (setq aidermacs--cached-models models)
+       (aidermacs--select-model)))))
+
+(defun aidermacs-clear-model-cache ()
+  "Clear the cached models, forcing a fresh fetch on next use."
+  (interactive)
+  (setq aidermacs--cached-models nil)
+  (message "Model cache cleared"))
 
 ;;;###autoload
 (defun aidermacs-change-model ()
   "Interactively select and change AI model in current aidermacs session."
   (interactive)
-  (let ((model (aidermacs--select-model)))
-    (when model
-      (aidermacs--send-command (format "/model %s" model) t))))
+  (when (and aidermacs--cached-models
+             (equal aidermacs--cached-models aidermacs-popular-models)
+             (fboundp 'aidermacs-buffer-name)
+             (get-buffer (aidermacs-buffer-name)))
+    (setq aidermacs--cached-models nil))
+
+  (if aidermacs--cached-models
+      (aidermacs--select-model)
+    (aidermacs--get-available-models)))
 
 (provide 'aidermacs-models)

diff --git a/aidermacs.el b/aidermacs.el
index c62613b5b3..c7d93d2062 100644
--- a/aidermacs.el
+++ b/aidermacs.el
@@ -185,6 +185,7 @@ This function can be customized or redefined by the user."
    ["Others"
     ("H" "Session History" aidermacs-show-output-history)
     ("C" "Copy Last Aidermacs Output" aidermacs-get-last-output)
+    ("O" "Clear Model Selection Cache" aidermacs-clear-model-cache)
     ("l" "Clear Buffer" aidermacs-clear)
     ("h" "Aider Help" aidermacs-help)]])
 
@@ -448,13 +449,11 @@ Sends the \"/ls\" command and returns the list of files via callback."
   (aidermacs--send-command
    "/ls" t
    (lambda (output)
-     (condition-case nil
-         (if-let* ((files (aidermacs--parse-ls-output output))
-                   (file (completing-read "Select file to drop: " files nil t)))
-             (progn
-               (aidermacs--send-command (format "/drop %s" file)))
-           (message "No files available to drop"))
-       (quit (message "Drop file cancelled"))))))
+     (with-local-quit
+       (if-let* ((files (aidermacs--parse-ls-output output))
+                 (file (completing-read "Select file to drop: " files nil t)))
+           (aidermacs--send-command (format "/drop %s" file))
+         (message "No files available to drop"))))))
 
 ;;;###autoload
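
For readers trying this out, here is a minimal usage sketch of the model-selection flow added above. It only uses names introduced in this change (aidermacs-popular-models, aidermacs--cached-models, aidermacs-change-model, aidermacs-clear-model-cache); putting it in an init file and the particular model strings chosen are illustrative assumptions, not part of the commit.

;; Illustrative init-file snippet.  Assumes the relevant provider keys
;; (OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY, ...) are already set in
;; the environment, since fetch-openai-compatible-models reads them via getenv.

;; Optional: set the quick "popular" list before loading the package; the
;; model cache (aidermacs--cached-models) is seeded from this list at load
;; time and used until the first dynamic fetch replaces it.
(setq aidermacs-popular-models
      '("anthropic/claude-3-5-sonnet-20241022"
        "gemini/gemini-2.0-flash"))

(require 'aidermacs-models)

;; M-x aidermacs-change-model        (or "o" in the transient menu)
;;   With a session running, the first call sends "/models /" to aider,
;;   queries each provider's /models endpoint, keeps the intersection, caches
;;   it, and then prompts with completing-read; later calls reuse the cache.
;; M-x aidermacs-clear-model-cache   (or "O" in the transient menu)
;;   Drops the cache so the next model change triggers a fresh fetch.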
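
And a rough sketch of the fetch-then-filter step inside aidermacs--get-available-models, reduced to a single provider so it can be evaluated on its own. The supported list is a hypothetical stand-in for the names parsed from aider's "/models /" output, and the "openai/..." spellings merely follow the prefixing done in fetch-openai-compatible-models; neither is taken from the commit.

;; Assumes OPENAI_API_KEY is set; in the real code path a failing provider is
;; caught by condition-case and simply skipped.
(require 'seq)
(require 'aidermacs-models)

(let* ((fetched   (fetch-openai-compatible-models "https://api.openai.com/v1"))
       ;; Hypothetical stand-in for the "- ..." lines parsed from "/models /".
       (supported '("openai/gpt-4o" "openai/gpt-4o-mini"))
       (usable    (seq-filter (lambda (m) (member m supported)) fetched)))
  ;; Mirror how the command caches results before prompting the user.
  (setq aidermacs--cached-models usable))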