Enable llm.el to work with ollama models

This commit is contained in:
Jeremy Dormitzer 2024-05-17 13:30:09 -04:00
parent 71c86332c5
commit 3c09eb3cbd

View File

@@ -121,16 +121,26 @@ The process is named NAME and runs in BUFFER-NAME."
(split-string (shell-command-to-string
(format "%s models" (executable-find llm-executable)))
"\n" t " "))
(models (mapcar (lambda (s)
(cons s (cadr (s-match ".*?: \\(.*?\\) -" s))))
model-strings))
(models (mapcar
(lambda (s)
(cons s
(cadr
(s-match ".*?: \\(.*?\\)\\(?:[[:blank:]]\\|$\\)" s))))
model-strings))
(selected (completing-read "Model: " models)))
(alist-get selected models nil nil #'equal))))
(setq llm-model model))
;; Per-model extra CLI arguments for the llm executable.
(defvar llm-model-extra-args-alist
  ;; NOTE: the unquote below evaluates `llm-max-tokens' once, when this
  ;; defvar is loaded — changing `llm-max-tokens' afterwards does not
  ;; update the entry.
  `(("Meta-Llama-3-8B-Instruct" . ("-o" "max_tokens" ,(number-to-string llm-max-tokens))))
  "Alist mapping model names to extra command-line arguments for llm.
Each entry is (MODEL-NAME . ARGS) where ARGS is a list of strings
appended to the llm invocation when MODEL-NAME is the active model
\(looked up with `equal' in `llm--prompt-args').")
(defun llm--prompt-args (query &rest extra-args)
"Return the arguments to prompt LLM with QUERY, appending EXTRA-ARGS."
(let* ((args (list "-o" "max_tokens" (number-to-string llm-max-tokens)))
(let* ((args nil)
(args (if-let ((extra-args (alist-get llm-model llm-model-extra-args-alist nil nil #'equal)))
(append extra-args args)
args))
(args (if llm-model
(append (list "--model" llm-model) args)
args))