Add llm-chat command and improve prompt code structure

Jeremy Dormitzer 2024-05-20 11:33:32 -04:00
parent c618c4d3ce
commit df79eee678


@@ -81,6 +81,13 @@
(evil-define-key 'normal llm-mode-map
(kbd "q") #'quit-window))
(defun llm--run-async-process-sentinel (proc string)
  "Insert the process status STRING at the end of PROC's buffer."
  (with-current-buffer (process-buffer proc)
    (goto-char (point-max))
    (newline)
    (newline)
    (insert (format "[llm %s]" string))))
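;; A note on the resulting output (an observation, not part of the commit):
;; on a normal exit Emacs hands the sentinel the string "finished\n", so the
;; buffer ends with "[llm finished\n]", the sentinel's trailing newline
;; landing inside the brackets.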
(defun llm--run-async (name buffer-name &rest llm-args)
"Run llm with LLM-ARGS asynchronously.
@@ -94,7 +101,28 @@ The process is named NAME and runs in BUFFER-NAME."
:filter #'llm--process-filter)))
(with-current-buffer (process-buffer proc)
(llm-mode))
(set-process-sentinel proc #'ignore)))
(set-process-sentinel proc #'llm--run-async-process-sentinel)))
(cl-defun llm--prompt-args (&key prompt system-prompt options extra-args)
"Construct the arguments to prompt LLM with PROMPT."
(let* ((opts (-mapcat (lambda (pair)
(list "-o" (car pair) (cdr pair)))
options))
(sys (when system-prompt
(list "-s" system-prompt)))
(model (when llm-model
(list "-m" llm-model))))
(append (list "prompt") model sys opts extra-args (list prompt))))
(cl-defun llm--prompt-async (&key prompt system-prompt options extra-args name buffer-name)
"Prompt LLM asynchronously with PROMPT and other options."
(let* ((name (or name "llm-prompt"))
(buffer-name (or buffer-name (format "*%s*" name)))
(args (llm--prompt-args :prompt prompt
:system-prompt system-prompt
:options options
:extra-args extra-args)))
(apply #'llm--run-async name buffer-name args)))
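;; Usage sketch: with NAME and BUFFER-NAME omitted, the process is named
;; "llm-prompt" and output lands in *llm-prompt*. For example:
;; (llm--prompt-async :prompt "Summarize this file"
;;                    :system-prompt "Be terse")
;; runs `llm prompt -s "Be terse" "Summarize this file"' (plus "-m" when
;; llm-model is set).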
;;;###autoload
(cl-defun llm-call (callback &rest llm-args)
@@ -131,49 +159,94 @@ The process is named NAME and runs in BUFFER-NAME."
(alist-get selected models nil nil #'equal))))
(setq llm-model model))
(defvar llm-model-extra-args-alist
`(("Meta-Llama-3-8B-Instruct" . ("-o" "max_tokens" ,(number-to-string llm-max-tokens))))
"Alist mapping model names to extra arguments to pass to llm.")
(defvar llm-model-options-alist
`(("Meta-Llama-3-8B-Instruct" . (("max_tokens" . ,(number-to-string llm-max-tokens)))))
"Alist mapping model names to options to pass to llm.")
(defun llm--prompt-args (query &rest extra-args)
"Return the arguments to prompt LLM with QUERY, appending EXTRA-ARGS."
(let* ((args nil)
(args (if-let ((extra-args (alist-get llm-model llm-model-extra-args-alist nil nil #'equal)))
(append extra-args args)
args))
(args (if llm-model
(append (list "--model" llm-model) args)
args))
(args (append args extra-args)))
(append (list "prompt") args (list query))))
(defun llm--model-options (&optional model)
"Get the extra arguments for MODEL."
(let ((model (or model llm-model)))
(alist-get model llm-model-options-alist nil nil #'equal)))
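;; For example, assuming llm-max-tokens was 1024 when the alist above was
;; built:
;; (llm--model-options "Meta-Llama-3-8B-Instruct")
;; => (("max_tokens" . "1024"))
;; Models absent from llm-model-options-alist yield nil, so no "-o" flags
;; are added for them.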
;;;###autoload
(defun llm-prompt (query)
"Prompt llm with the QUERY."
(interactive "sQuery: ")
(apply #'llm--run-async "llm-prompt" "*llm-prompt*" (llm--prompt-args query))
(defun llm-prompt (query &optional system-prompt)
"Prompt llm with the QUERY and optionally SYSTEM-PROMPT."
(interactive (list (read-string "Query: " nil nil)
(when current-prefix-arg
(read-string "System prompt: " nil nil))))
(llm--prompt-async :prompt query :options (llm--model-options))
(switch-to-buffer "*llm-prompt*"))
;;;###autoload
(defun llm-prompt-buffer (query)
"Prompt llm with the contents of the current buffer and the QUERY."
(interactive "sQuery: ")
(let ((extra-args (list "-s" (buffer-substring-no-properties (point-min) (point-max)))))
(apply #'llm--run-async
"llm-prompt-buffer"
"*llm-prompt-buffer*"
(apply #'llm--prompt-args query extra-args))
(switch-to-buffer "*llm-prompt-buffer*")))
(defun llm-prompt-buffer (system-prompt)
"Prompt llm with the contents of the current buffer and the SYSTEM-PROMPT."
(interactive "sSystem prompt: ")
(llm--prompt-async :prompt (buffer-substring-no-properties (point-min) (point-max))
:system-prompt system-prompt
:options (llm--model-options)
:name "llm-prompt-buffer"
:buffer-name "*llm-prompt-buffer*")
(switch-to-buffer "*llm-prompt-buffer*"))
(defun llm-prompt-region (query)
"Prompt llm with the contents of the region and the QUERY."
(interactive "sQuery: ")
(let ((extra-args (list "-s" (buffer-substring-no-properties (region-beginning) (region-end)))))
(apply #'llm--run-async
"llm-prompt-region"
"*llm-prompt-region*"
(apply #'llm--prompt-args query extra-args))
(switch-to-buffer "*llm-prompt-region*")))
(defun llm-prompt-region (system-prompt)
"Prompt llm with the contents of the region and the SYSTEM-PROMPT."
(interactive "sSystem prompt: ")
(llm--prompt-async :prompt (buffer-substring-no-properties (region-beginning) (region-end))
:system-prompt system-prompt
:options (llm--model-options)
:name "llm-prompt-region"
:buffer-name "*llm-prompt-region*")
(switch-to-buffer "*llm-prompt-region*"))
(defvar llm-chat-mode-map
(make-sparse-keymap)
"Keymap for `llm-chat-mode'.")
(defvar llm-chat-prompt-regexp "^> "
"Regexp to match the prompt in `llm-chat-mode'.")
(define-derived-mode llm-chat-mode comint-mode "llm-chat"
"Major mode for chatting with llm."
(setq comint-prompt-regexp llm-chat-prompt-regexp)
(setq comint-prompt-read-only t)
(setq comint-process-echoes t))
(cl-defun llm--chat-args (&key system-prompt options)
  "Construct the llm CLI arguments for a chat session.
SYSTEM-PROMPT becomes a \"-s\" flag and each OPTIONS pair a \"-o\" flag."
(let ((opts (-mapcat (lambda (pair)
(list "-o" (car pair) (cdr pair)))
options))
(sys (when system-prompt
(list "-s" system-prompt)))
(model (when llm-model
(list "-m" llm-model))))
(append (list "chat") model sys opts)))
(defun llm-chat (system-prompt &optional name)
"Start a chat session with llm, prompting it with SYSTEM-PROMPT, naming the process and buffer NAME."
(interactive (list (read-string "System prompt: " "You are a helpful AI assistant.")
"llm-chat"))
(let* ((name (or name "llm-chat"))
(buffer-name (format "*%s*" name))
(buffer (get-buffer-create buffer-name))
(proc-alive (comint-check-proc buffer)))
(unless proc-alive
(with-current-buffer buffer
(apply #'make-comint-in-buffer
name
buffer
llm-executable
nil
(llm--chat-args :system-prompt system-prompt
:options (llm--model-options)))
(llm-chat-mode)))
(pop-to-buffer buffer)))
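;; Usage sketch: M-x llm-chat reads a system prompt (defaulting to the
;; helpful-assistant string) and pops to the *llm-chat* comint buffer; a
;; second M-x llm-chat reuses the live process, since comint-check-proc
;; guards against starting a duplicate.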
(defun llm-doctor ()
"Start a psychotherapy session with llm."
(interactive)
(llm-chat "You are an empathetic therapist." "llm-doctor"))
(provide 'llm)
;;; llm.el ends here