Delete outdated packages

parent 79029fc03b
commit e2538b3666

llama.el
@@ -1,435 +0,0 @@
;;; llama.el --- AI-assisted Emacs -*- lexical-binding: t; -*-

;; Copyright (C) 2024 Jeremy Isaac Dormitzer

;; Author: Jeremy Isaac Dormitzer <jeremy.dormitzer@gmail.com>
;; Package-Requires: ((emacs "29.1") (llm "0.15") (markdown-mode "2.7") (s "1.13") (spinner "1.7.4"))

;; This program is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:

;; Provides AI assistance features via llm.el.

;;; Code:
(require 'cl-lib)
(require 'llm)
(require 's)
(require 'spinner)
;; for font-locking
(require 'markdown-mode)

(defvar llama-llm-provider nil
  "The llm.el provider to use.")

(defvar llama-chat-prompt-symbol "> "
  "The symbol used to indicate the user's input in the chat buffer.")

(defvar llama-chat-default-name "*llama-chat*"
  "The default name for the chat buffer.")

(defvar llama-chat-default-initial-prompt-args
  '("Briefly greet the user without mentioning your name and ask how you can help."
    :context "You are a helpful AI assistant running inside the Emacs text editor.")
  "The arguments for the default initial chat prompt.")

(defvar llama-spinner-type 'progress-bar
  "The mode-line spinner type from spinner.el to use when waiting for the AI.")

(defvar-local llama-current-chat-prompt nil
  "Chat prompt object for the current buffer.")

(defvar-local llama-current-chat-filter nil
  "Filter function for the current chat buffer.")

(defvar-local llama-user-input-begin-marker nil
  "Marker for the beginning of the user's input.")

(defvar-local llama-user-input-end-marker nil
  "Marker for the end of the user's input.")

(defvar-local llama-ai-response-begin-marker nil
  "Marker for the beginning of the AI's response.")

(defvar-local llama-ai-response-end-marker nil
  "Marker for the end of the AI's response.")

(defvar-local llama-waiting-for-ai nil
  "Non-nil if we are waiting for the AI to respond.")
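
;; Example usage (not part of the original file): `llama-llm-provider'
;; must be set to an llm.el provider object before chatting.  A minimal
;; sketch, assuming the llm-ollama provider backend is installed and an
;; Ollama server is running locally; the model name is a placeholder:
;;
;;   (require 'llm-ollama)
;;   (setq llama-llm-provider (make-llm-ollama :chat-model "llama3"))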

(defun llama-chat-streaming-to-chat-buffer (provider prompt buffer point finish-callback)
  "Stream the response to PROMPT from PROVIDER into BUFFER at POINT.

A version of `llm-chat-streaming-to-point' that binds
`inhibit-read-only' in the insertion callback and calls
FINISH-CALLBACK with the final response text."
  (with-current-buffer buffer
    (save-excursion
      (let ((start (make-marker))
            (end (make-marker)))
        (set-marker start point)
        (set-marker end point)
        (set-marker-insertion-type start nil)
        (set-marker-insertion-type end t)
        (setq llama-waiting-for-ai t)
        (spinner-start llama-spinner-type)
        (cl-flet ((insert-text (text)
                    ;; Erase and insert the new text between the marker cons.
                    (with-current-buffer (marker-buffer start)
                      (let ((inhibit-read-only t))
                        (save-excursion
                          (goto-char start)
                          (delete-region start end)
                          (insert text))))))
          (llm-chat-streaming provider prompt
                              (lambda (text) (insert-text text))
                              (lambda (text)
                                (spinner-stop)
                                (insert-text text)
                                (funcall finish-callback text)
                                (setq llama-waiting-for-ai nil))
                              (lambda (_ msg)
                                (spinner-stop)
                                (setq llama-waiting-for-ai nil)
                                (error "Error calling the LLM: %s" msg))))))))

(defun llama-chat-eldoc-function (cb)
  "Eldoc documentation function for `llama-chat-mode'.
Calls CB with the URL of the markdown link at point, if any."
  (cond
   ((markdown-link-p)
    (funcall cb (markdown-link-url)
             :thing (nth 2 (markdown-link-at-pos (point)))))))

(defun llama-chat-mode-initialize ()
  "Set up a new chat buffer."
  (setq llama-user-input-begin-marker (make-marker)
        llama-user-input-end-marker (make-marker)
        llama-ai-response-begin-marker (make-marker)
        llama-ai-response-end-marker (make-marker))
  ;; The end markers advance when text is inserted at them, so they
  ;; always stay at the end of the input and response regions.
  (set-marker-insertion-type llama-user-input-end-marker t)
  (set-marker-insertion-type llama-ai-response-end-marker t)
  (set-marker llama-ai-response-begin-marker (point-max))
  (set-marker llama-ai-response-end-marker (point-max)))

(defun llama-chat-self-insert-advice (&rest _)
  "Ensure that point is within the user input zone before inserting typed input."
  (when (and (eq major-mode 'llama-chat-mode)
             (< (point) (marker-position llama-user-input-begin-marker)))
    (goto-char llama-user-input-end-marker)))

(advice-add 'self-insert-command :before #'llama-chat-self-insert-advice)

(define-derived-mode llama-chat-mode text-mode "Llama"
  "Major mode for chatting with the AI."
  :interactive nil
  :group 'llama
  ;; Use markdown-mode for font-locking
  (setq font-lock-defaults
        '(markdown-mode-font-lock-keywords
          nil nil nil nil
          (font-lock-multiline . t)
          (font-lock-syntactic-face-function . markdown-syntactic-face)
          (font-lock-extra-managed-props
           . (composition display invisible keymap help-echo mouse-face))))
  (add-hook 'eldoc-documentation-functions #'llama-chat-eldoc-function nil t)
  (setq-local window-point-insertion-type t)
  (llama-chat-mode-initialize))

(defun llama-chat-buffer-name ()
  "Return the default name for new chat buffers."
  llama-chat-default-name)

(cl-defun llama-ai-response-finished-callback (&key callback)
  "Return a function to run when the AI finishes responding.
The returned function marks the response read-only, inserts a fresh
prompt, resets the input and response markers, and calls CALLBACK (if
non-nil) with the response text."
  (lambda (text)
    (put-text-property llama-ai-response-begin-marker
                       llama-ai-response-end-marker
                       'read-only t)
    (let ((inhibit-read-only t))
      (save-excursion
        (goto-char (point-max))
        (insert (propertize "\n\n" 'read-only t))
        (insert (propertize llama-chat-prompt-symbol 'read-only t 'rear-nonsticky '(read-only)))
        (set-marker llama-user-input-begin-marker (point))
        (set-marker llama-user-input-end-marker llama-user-input-begin-marker)
        (set-marker llama-ai-response-begin-marker (point))
        (set-marker llama-ai-response-end-marker llama-ai-response-begin-marker)))
    (goto-char llama-user-input-begin-marker)
    (when callback (funcall callback text))))

(cl-defun llama-chat-send-prompt (name prompt &key filter callback)
  "Send the PROMPT to the chat buffer named NAME.

If FILTER is provided, it should be a function that accepts the raw AI response
and two callback arguments `insert' and `send'.  In the filter function, call
`insert' to insert text into the chat buffer or `send' to send additional text
to the AI (e.g. to provide function call results).

If CALLBACK is provided, it will be called with the raw AI response text after
it has been inserted into the chat buffer."
  (with-current-buffer name
    (if filter
        (cl-flet ((insert (text)
                    (let ((inhibit-read-only t))
                      (save-excursion
                        (goto-char llama-ai-response-begin-marker)
                        (insert text)))
                    (funcall (llama-ai-response-finished-callback :callback callback) text))
                  (send (text)
                    (llm-chat-prompt-append-response prompt text)
                    (llama-chat-send-prompt name prompt :filter filter)))
          (llm-chat-async llama-llm-provider prompt
                          (lambda (response)
                            (funcall filter response #'insert #'send))))
      (llama-chat-streaming-to-chat-buffer llama-llm-provider
                                           prompt
                                           (current-buffer)
                                           llama-ai-response-begin-marker
                                           (llama-ai-response-finished-callback :callback callback)))))
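
;; Example usage (not part of the original file): a minimal sketch of a
;; FILTER function that strips <think>...</think> blocks from the raw
;; response before inserting it.  The tag name and `my-prompt' are
;; placeholders; adapt them to what your model actually emits.
;;
;;   (defun my/llama-strip-think-filter (response insert _send)
;;     (funcall insert
;;              (replace-regexp-in-string "<think>\\(.\\|\n\\)*?</think>" ""
;;                                        response)))
;;
;;   (llama-chat-send-prompt "*llama-chat*" my-prompt
;;                           :filter #'my/llama-strip-think-filter)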

(cl-defun llama-chat-buffer (name prompt &key provider filter callback)
  "Return a chat buffer named NAME, creating and initializing it if needed.
New buffers are initialized with PROMPT and the optional PROVIDER,
FILTER and CALLBACK (see `llama-chat-send-prompt')."
  (let ((buffer (get-buffer-create name)))
    (with-current-buffer buffer
      (unless (eq major-mode 'llama-chat-mode)
        (llama-chat-mode)
        (when provider
          (setq-local llama-llm-provider provider))
        (when filter
          (setq-local llama-current-chat-filter filter))
        (setq llama-current-chat-prompt prompt)
        (llama-chat-send-prompt name prompt :filter filter :callback callback)))
    buffer))

(defun llama-chat-send ()
  "Send the current user input to the AI."
  (interactive)
  (unless (eq major-mode 'llama-chat-mode)
    (error "Not in a llama-chat buffer"))
  (let ((input (s-trim
                (buffer-substring-no-properties llama-user-input-begin-marker
                                                llama-user-input-end-marker))))
    (when (s-present? input)
      (llm-chat-prompt-append-response llama-current-chat-prompt input)
      (save-excursion
        (let ((inhibit-read-only t))
          (goto-char llama-user-input-end-marker)
          (insert (propertize "\n\n" 'read-only t))
          (set-marker llama-ai-response-begin-marker (point))))
      (llama-chat-send-prompt (current-buffer)
                              llama-current-chat-prompt
                              :filter llama-current-chat-filter))))

(defun llama-chat-follow-link ()
  "Follow the button or markdown link at point."
  (interactive)
  (cond
   ((button-at (point)) (push-button (point)))
   ((markdown-link-p) (markdown-follow-link-at-point))))

(defun llama-chat-context-action ()
  "Perform a contextual action in the chat buffer based on point:

* follows the link at point
* submits the input if point is within the user input zone"
  (interactive)
  (cond
   ((markdown-link-p) (llama-chat-follow-link))
   ((and (>= (point) (marker-position llama-user-input-begin-marker))
         (<= (point) (marker-position llama-user-input-end-marker))
         (not llama-waiting-for-ai))
    (llama-chat-send))))

(defun llama-chat-next-prompt ()
  "Jump to the next prompt in the chat buffer."
  (interactive)
  (let ((found (save-excursion
                 (forward-line 1)
                 (search-forward-regexp (rx line-start (literal llama-chat-prompt-symbol)) nil t))))
    (when found
      (goto-char found)
      (beginning-of-line))))

(defun llama-chat-previous-prompt ()
  "Jump to the previous prompt in the chat buffer."
  (interactive)
  (let ((found (save-excursion
                 (forward-line -1)
                 (search-backward-regexp (rx line-start (literal llama-chat-prompt-symbol)) nil t))))
    (when found
      (goto-char found)
      (beginning-of-line))))

(defun llama-chat ()
  "Start a chat with the AI."
  (interactive)
  (pop-to-buffer (llama-chat-buffer
                  llama-chat-default-name
                  (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args))))

(defun llama-doctor ()
  "Start a psychotherapy session with the AI."
  (interactive)
  (pop-to-buffer (llama-chat-buffer
                  "*llama-doctor*"
                  (llm-make-chat-prompt
                   "Briefly greet the client without mentioning your name and ask how you can help."
                   :context "You are an empathetic therapist."))))

(keymap-set llama-chat-mode-map "RET" #'llama-chat-context-action)
(keymap-set llama-chat-mode-map "S-<return>" #'newline)
(keymap-set llama-chat-mode-map "C-j" #'newline)
(keymap-set llama-chat-mode-map "C-c C-c" #'llama-chat-send)
(keymap-set llama-chat-mode-map "C-c C-n" #'llama-chat-next-prompt)
(keymap-set llama-chat-mode-map "C-c C-p" #'llama-chat-previous-prompt)
(keymap-set llama-chat-mode-map "M-n" #'markdown-next-link)
(keymap-set llama-chat-mode-map "M-p" #'markdown-previous-link)
(when (featurep 'evil)
  (evil-define-key 'normal llama-chat-mode-map
    (kbd "RET") #'llama-chat-follow-link
    "[[" #'llama-chat-previous-prompt
    "]]" #'llama-chat-next-prompt))

(cl-defun llama-send-string-to-chat (name string &key user-visible-string initial-prompt)
  "Send STRING to the chat named NAME.

If USER-VISIBLE-STRING is provided, display that as the user input in the chat
buffer instead of the original string.

If the chat buffer does not exist yet, it is created with INITIAL-PROMPT,
which defaults to the prompt built from
`llama-chat-default-initial-prompt-args'."
  (cl-flet ((send (&rest _args)
              (with-current-buffer name
                (save-excursion
                  (let ((inhibit-read-only t))
                    (goto-char llama-user-input-begin-marker)
                    (insert (or user-visible-string string))
                    (goto-char llama-user-input-end-marker)
                    (insert (propertize "\n\n" 'read-only t))
                    (set-marker llama-ai-response-begin-marker (point))))
                (llm-chat-prompt-append-response llama-current-chat-prompt string)
                (llama-chat-send-prompt name llama-current-chat-prompt :filter llama-current-chat-filter))))
    (if (get-buffer name)
        (send)
      (pop-to-buffer (llama-chat-buffer
                      name
                      (or initial-prompt
                          (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args))
                      :callback #'send)))))
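
;; Example usage (not part of the original file): sending a string to
;; the default chat programmatically, showing the user a shorter label
;; than what the model actually receives.
;;
;;   (llama-send-string-to-chat
;;    "*llama-chat*"
;;    (format "Summarize this file:\n%s" (buffer-string))
;;    :user-visible-string "<current file> Summarize this file.")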

(defun llama-ask-region (start end prompt &optional name)
  "Ask the AI in buffer NAME the PROMPT about the region between START and END.

NAME defaults to `llama-chat-default-name'."
  (interactive (list (region-beginning)
                     (region-end)
                     (read-string "Prompt: ")
                     (if current-prefix-arg
                         (read-string "Chat buffer: ")
                       llama-chat-default-name)))
  (let ((input (format "\n%s\n\n%s" (buffer-substring-no-properties start end) prompt)))
    (llama-send-string-to-chat
     (or name llama-chat-default-name)
     input)
    (display-buffer (or name llama-chat-default-name))))

(defun llama-ask-buffer (buffer prompt &optional name)
  "Ask the AI in buffer NAME the PROMPT about BUFFER.
Interactively, BUFFER is the current buffer.

NAME defaults to `llama-chat-default-name'."
  (interactive (list (current-buffer)
                     (read-string "Prompt: ")
                     (if current-prefix-arg
                         (read-string "Chat buffer: ")
                       llama-chat-default-name)))
  (let* ((input (with-current-buffer buffer
                  (format "%s\n\n%s" (buffer-substring-no-properties (point-min) (point-max)) prompt)))
         (button (buttonize
                  (format "<Buffer: %s>" buffer)
                  (lambda (_)
                    (pop-to-buffer buffer)))))
    (llama-send-string-to-chat
     (or name llama-chat-default-name)
     input
     :user-visible-string (format "%s\n\n%s" button prompt))
    (display-buffer (or name llama-chat-default-name))))

(defun llama-replace-in-region (start end prompt)
  "Replace the region between START and END with the AI's response to PROMPT.
Requires confirmation."
  (interactive "r\nsPrompt: ")
  (let ((buffer (current-buffer))
        (llm-prompt (llm-make-chat-prompt (format "PROMPT:\n%s\n\nINPUT:\n%s\n" prompt (buffer-substring-no-properties start end))
                                          :context "You are an AI assistant tasked with generating replacement text based on some input text and a prompt. You will be given a PROMPT and an INPUT, and must produce a REPLACEMENT that replaces the original input and an EXPLANATION that explains why the replacement was chosen. Format your answer like this:

EXPLANATION:
<explanation>
REPLACEMENT:
<replacement>

Do not include any additional notes or commentary outside of the explanation section - all text following the REPLACEMENT: label should be the verbatim replacement."
                                          :examples '(("PROMPT:\nCan you fix the grammar in this sentence?\n\nINPUT:\nI loves to eat pizza!\n"
                                                       .
                                                       "EXPLANATION:\nThe correct conjugation for the verb \"love\" in first person singular is \"I love\".\nREPLACEMENT:\nI love to eat pizza!")
                                                      ("PROMPT:\nLowercase all the keys of this JSON object\n\nINPUT:\n{\"Foo\": \"bar\", \"Baz\": \"qux\"}\n"
                                                       .
                                                       "EXPLANATION:\nI made all the keys of the JSON object lowercase\nREPLACEMENT:\n{\"foo\": \"bar\", \"baz\": \"qux\"}")
                                                      ("PROMPT:\nRewrite this into a list of bullet points\n\nINPUT:\nWilliam Barry Wood, Jr. (May 4, 1910 – March 9, 1971) was an American football player and medical educator. Wood played quarterback for Harvard during the 1929–1931 seasons and was one of the most prominent football players of his time. He was elected to the College Football Hall of Fame in 1980.\n"
                                                       .
                                                       "EXPLANATION:\nHere is the rewritten text in a list of bullet points\nREPLACEMENT:\n• William Barry Wood, Jr. (May 4, 1910 – March 9, 1971) was an American football player and medical educator.
• He played quarterback for Harvard University during the seasons:
  + 1929
  + 1930
  + 1931
• He was one of the most prominent football players of his time.
• Wood was elected to the College Football Hall of Fame in 1980.")))))
    (spinner-start llama-spinner-type)
    (llm-chat-async llama-llm-provider
                    llm-prompt
                    (lambda (response)
                      (with-current-buffer buffer
                        (spinner-stop))
                      (with-temp-buffer
                        (insert response)
                        (goto-char (point-min))
                        (let* ((exp-start (save-excursion
                                            (when (search-forward "EXPLANATION:" nil t)
                                              (point))))
                               (replace-start (save-excursion
                                                (when (search-forward "REPLACEMENT:" nil t)
                                                  (point))))
                               (exp-end (when replace-start (- replace-start (length "REPLACEMENT:"))))
                               (explanation (when (and exp-start exp-end)
                                              (s-trim (buffer-substring-no-properties exp-start exp-end))))
                               (replacement (when replace-start
                                              (s-trim (buffer-substring-no-properties replace-start (point-max))))))
                          (unless replacement
                            (error "LLM did not return a valid replacement"))
                          (when (y-or-n-p (format "Explanation:\n%s\n\nReplacement:\n%s\nAccept AI replacement?"
                                                  explanation
                                                  replacement))
                            (with-current-buffer buffer
                              (save-excursion
                                (delete-region start end)
                                (goto-char start)
                                (insert replacement)))))))
                    (lambda (_ msg) (error "Error calling the LLM: %s" msg)))))

(defun llama-add-comments (start end)
  "Add explanatory comments to the code between START and END."
  (interactive "r")
  (llama-replace-in-region
   start
   end
   "Add concise comments explaining parts of this code that would be otherwise difficult to interpret. Comments belong either on a line by themselves above the code they explain, or inline with the code at the end of the line. Answer with the complete code including the comments. Do not wrap your response in code fences or other markup."))

(defun llama-fill (start end)
  "Replace the //fill keyword with the missing logic between START and END."
  (interactive "r")
  (llama-replace-in-region
   start
   end
   "Replace the keyword //fill in the input with the missing logic. Answer with the complete code including filled-in logic. Do not wrap your response in code fences or other markup."))
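
;; Example usage (not part of the original file): mark a region like the
;; following and run `llama-fill'; the //fill keyword marks the logic
;; the model is asked to supply.
;;
;;   function isLeapYear(year) {
;;     //fill
;;   }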

(defun llama-rename-symbols (start end)
  "Rename code symbols between START and END for clarity and expressiveness."
  (interactive "r")
  (llama-replace-in-region
   start
   end
   "Rename code symbols (function, variable, class) for clarity and expressiveness. Answer with only the complete code. Do not wrap your response in code fences or other markup."))

(provide 'llama)
;;; llama.el ends here

llm.el
@@ -1,254 +0,0 @@
;;; llm.el --- An Emacs interface to the LLM command-line tool -*- lexical-binding: t; -*-

;; Copyright (C) 2024 Jeremy Isaac Dormitzer

;; Author: Jeremy Isaac Dormitzer <jeremydormitzer@hummingbird.co>
;; Version: 0.1
;; Package-Requires: ((emacs "26.1") (s "1.13") (dash "2.19") (markdown-mode "2.7"))
;; Keywords: tools

;; This program is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:

;; This package provides an Emacs interface to the LLM command-line tool.

;;; Code:
(require 'cl-lib)
(require 'subr-x)
(require 's)
(require 'dash)
(require 'markdown-mode)

(defcustom llm-executable "llm"
  "Path to the llm executable."
  :type 'string
  :group 'llm)

(defcustom llm-model nil
  "The llm model to use, or nil for llm's default model."
  :type '(choice (const :tag "Default" nil) string)
  :group 'llm)

(defcustom llm-max-tokens 5000
  "The maximum number of tokens to generate."
  :type 'integer
  :group 'llm)

(defun llm--ensure-executable ()
  "Ensure that the llm executable is available."
  (unless (executable-find llm-executable)
    (error
     "llm executable not found: see https://llm.datasette.io/en/stable/index.html for installation instructions")))

(defun llm--process-filter (proc string)
  "Insert PROC's output STRING into its buffer, handling carriage returns.
Emulates a terminal's treatment of \\r so that llm's progress output
overwrites the current line instead of accumulating."
  (let* ((buffer (process-buffer proc))
         (window (get-buffer-window buffer))
         (string (replace-regexp-in-string "\r\n" "\n" string)))
    (when (buffer-live-p buffer)
      (with-current-buffer buffer
        (if (not (mark)) (push-mark))
        (exchange-point-and-mark) ;Use the mark to represent the cursor location
        (dolist (char (append string nil))
          (cond ((char-equal char ?\r)
                 (move-beginning-of-line 1))
                ((char-equal char ?\n)
                 (move-end-of-line 1) (newline))
                (t
                 (if (/= (point) (point-max)) ;Overwrite character
                     (delete-char 1))
                 (insert char))))
        (exchange-point-and-mark)))
    (if window
        (with-selected-window window
          (goto-char (point-max))))))

(define-derived-mode llm-mode markdown-mode "llm"
  "Major mode for LLM output.")

(define-key llm-mode-map
            (kbd "q") #'quit-window)

(when (fboundp #'evil-define-key)
  (evil-define-key 'normal llm-mode-map
    (kbd "q") #'quit-window))

(defun llm--run-async-process-sentinel (proc string)
  "Process sentinel that records PROC's exit STRING in its buffer."
  (with-current-buffer (process-buffer proc)
    (goto-char (point-max))
    (newline)
    (newline)
    (insert (format "[llm %s]" (s-trim string)))))

(defun llm--run-async (name buffer-name &rest llm-args)
  "Run llm with LLM-ARGS asynchronously.

The process is named NAME and runs in BUFFER-NAME.  Returns the
process object."
  (llm--ensure-executable)
  (when-let ((existing-buffer (get-buffer buffer-name)))
    (kill-buffer existing-buffer))
  (let ((proc (make-process :name name
                            :buffer buffer-name
                            :command (cons llm-executable llm-args)
                            :filter #'llm--process-filter)))
    (with-current-buffer (process-buffer proc)
      (llm-mode))
    (set-process-sentinel proc #'llm--run-async-process-sentinel)
    ;; Return the process so callers like `llm-call' can install their
    ;; own sentinel.
    proc))

(cl-defun llm--prompt-args (&key prompt system-prompt options extra-args)
  "Construct the arguments to prompt LLM with PROMPT."
  (let* ((opts (-mapcat (lambda (pair)
                          (list "-o" (car pair) (cdr pair)))
                        options))
         (sys (when system-prompt
                (list "-s" system-prompt)))
         (model (when llm-model
                  (list "-m" llm-model))))
    (append (list "prompt") model sys opts extra-args (list prompt))))
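
;; For example (illustrative, not part of the original file), with
;; `llm-model' set to "gpt-4", the call
;;
;;   (llm--prompt-args :prompt "hi"
;;                     :options '(("temperature" . "0.2")))
;;
;; produces ("prompt" "-m" "gpt-4" "-o" "temperature" "0.2" "hi").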

(cl-defun llm--prompt-async (&key prompt system-prompt options extra-args name buffer-name)
  "Prompt LLM asynchronously with PROMPT and other options."
  (let* ((name (or name "llm-prompt"))
         (buffer-name (or buffer-name (format "*%s*" name)))
         (args (llm--prompt-args :prompt prompt
                                 :system-prompt system-prompt
                                 :options options
                                 :extra-args extra-args)))
    (apply #'llm--run-async name buffer-name args)))

;;;###autoload
(cl-defun llm-call (callback &rest llm-args)
  "Call llm with LLM-ARGS and call CALLBACK with the result."
  (when-let ((buf (get-buffer " *llm-call*")))
    (kill-buffer buf))
  (let ((proc (apply #'llm--run-async "llm-call" " *llm-call*" llm-args)))
    (set-process-sentinel proc
                          (lambda (proc event)
                            (unless (string= event "finished\n")
                              (error "llm-call failed: %s" (s-trim event)))
                            (with-current-buffer (process-buffer proc)
                              (goto-char (point-min))
                              (funcall callback (s-trim
                                                 (buffer-substring-no-properties
                                                  (point)
                                                  (point-max)))))
                            (kill-buffer (process-buffer proc))))))
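
;; Example usage (not part of the original file): calling llm
;; non-interactively and receiving the trimmed output.  The arguments
;; mirror the command line, e.g. `llm prompt "..."'.
;;
;;   (llm-call (lambda (result) (message "llm says: %s" result))
;;             "prompt" "What is the capital of France?")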

;;;###autoload
(defun llm-set-model (model)
  "Set the llm model to MODEL.
Interactively, choose from the models reported by `llm models'."
  (interactive (list (let* ((model-strings
                             (split-string (shell-command-to-string
                                            (format "%s models" (executable-find llm-executable)))
                                           "\n" t " "))
                            (models (mapcar
                                     (lambda (s)
                                       (cons s
                                             (cadr
                                              (s-match ".*?: \\(.*?\\)\\(?:[[:blank:]]\\|$\\)" s))))
                                     model-strings))
                            (selected (completing-read "Model: " models)))
                       (alist-get selected models nil nil #'equal))))
  (setq llm-model model))

(defvar llm-model-options-alist
  `(("Meta-Llama-3-8B-Instruct" . (("max_tokens" . ,(number-to-string llm-max-tokens)))))
  "Alist mapping model names to options to pass to llm.")
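
;; Example usage (not part of the original file): registering options
;; for another model.  Option names must be ones the model's llm plugin
;; actually accepts; "temperature" here is a placeholder.
;;
;;   (add-to-list 'llm-model-options-alist
;;                '("gpt-4" . (("temperature" . "0.2"))))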

(defun llm--model-options (&optional model)
  "Get the extra options for MODEL, defaulting to `llm-model'."
  (let ((model (or model llm-model)))
    (alist-get model llm-model-options-alist nil nil #'equal)))

;;;###autoload
(defun llm-prompt (query &optional system-prompt)
  "Prompt llm with the QUERY and optionally SYSTEM-PROMPT."
  (interactive (list (read-string "Query: " nil nil)
                     (when current-prefix-arg
                       (read-string "System prompt: " nil nil))))
  (llm--prompt-async :prompt query
                     :system-prompt system-prompt
                     :options (llm--model-options))
  (switch-to-buffer "*llm-prompt*"))

;;;###autoload
(defun llm-prompt-buffer (system-prompt)
  "Prompt llm with the contents of the current buffer and the SYSTEM-PROMPT."
  (interactive "sSystem prompt: ")
  (llm--prompt-async :prompt (buffer-substring-no-properties (point-min) (point-max))
                     :system-prompt system-prompt
                     :options (llm--model-options)
                     :name "llm-prompt-buffer"
                     :buffer-name "*llm-prompt-buffer*")
  (switch-to-buffer "*llm-prompt-buffer*"))

(defun llm-prompt-region (system-prompt)
  "Prompt llm with the contents of the region and the SYSTEM-PROMPT."
  (interactive "sSystem prompt: ")
  (llm--prompt-async :prompt (buffer-substring-no-properties (region-beginning) (region-end))
                     :system-prompt system-prompt
                     :options (llm--model-options)
                     :name "llm-prompt-region"
                     :buffer-name "*llm-prompt-region*")
  (switch-to-buffer "*llm-prompt-region*"))

(defvar llm-chat-mode-map
  (make-sparse-keymap)
  "Keymap for `llm-chat-mode'.")

(defvar llm-chat-prompt-regexp "^> "
  "Regexp to match the prompt in `llm-chat-mode'.")

(define-derived-mode llm-chat-mode comint-mode "llm-chat"
  "Major mode for chatting with llm."
  (setq comint-prompt-regexp llm-chat-prompt-regexp)
  (setq comint-prompt-read-only t)
  (setq comint-process-echoes t))

(cl-defun llm--chat-args (&key system-prompt options)
  "Construct the arguments for the `llm chat' subcommand."
  (let ((opts (-mapcat (lambda (pair)
                         (list "-o" (car pair) (cdr pair)))
                       options))
        (sys (when system-prompt
               (list "-s" system-prompt)))
        (model (when llm-model
                 (list "-m" llm-model))))
    (append (list "chat") model sys opts)))

;;;###autoload
(defun llm-chat (system-prompt &optional name)
  "Start a chat session with llm, prompting it with SYSTEM-PROMPT.
The process and buffer are named NAME."
  (interactive (list (read-string "System prompt: " "You are a helpful AI assistant running inside the Emacs text editor.")
                     "llm-chat"))
  (let* ((name (or name "llm-chat"))
         (buffer-name (format "*%s*" name))
         (buffer (get-buffer-create buffer-name))
         (proc-alive (comint-check-proc buffer)))
    (unless proc-alive
      (with-current-buffer buffer
        (apply #'make-comint-in-buffer
               name
               buffer
               llm-executable
               nil
               (llm--chat-args :system-prompt system-prompt
                               :options (llm--model-options)))
        (llm-chat-mode)))
    (pop-to-buffer buffer)))

;;;###autoload
(defun llm-doctor ()
  "Start a psychotherapy session with llm."
  (interactive)
  (llm-chat "You are an empathetic therapist." "llm-doctor"))

(provide 'llm)
;;; llm.el ends here