diff --git a/emacs/.emacs.d/packages/llama/llama.el b/emacs/.emacs.d/packages/llama/llama.el
index d7b0c93..62be3cf 100644
--- a/emacs/.emacs.d/packages/llama/llama.el
+++ b/emacs/.emacs.d/packages/llama/llama.el
@@ -34,6 +34,14 @@
(defvar llama-chat-prompt-symbol "> "
"The symbol used to indicate the user's input in the chat buffer.")
+(defvar llama-chat-default-name "*llama-chat*"
+ "The default name for the chat buffer.")
+
+(defvar llama-chat-default-initial-prompt-args
+ '("Briefly greet the user without mentioning your name and ask how you can help."
+ :context "You are a helpful AI assistant running inside the Emacs text editor.")
+ "The arguments for the default initial chat prompt.")
+
(defvar-local llama-current-chat-prompt nil
"Chat prompt object for the current buffer.")
@@ -129,7 +137,7 @@
(set-marker llama-ai-response-end-marker llama-ai-response-begin-marker)))
(goto-char llama-user-input-begin-marker))
-(defun llama-chat-buffer (name prompt &optional provider)
+(cl-defun llama-chat-buffer (name prompt &key provider callback)
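+  "Return a chat buffer named NAME, creating it if necessary.
+
+PROMPT is the `llm' chat prompt used to start the conversation.
+PROVIDER, when non-nil, is the `llm' provider to use.  If CALLBACK
+is provided, it is called with the AI's response text once the
+initial reply has finished streaming."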
(let ((buffer (get-buffer-create name)))
(with-current-buffer buffer
(unless (eq major-mode 'llama-chat-mode)
@@ -139,7 +147,13 @@
llama-current-chat-prompt
(current-buffer)
llama-ai-response-begin-marker
- #'llama-ai-response-finished-callback)))
+ (lambda ()
+ (let ((ai-response (buffer-substring-no-properties
+ llama-ai-response-begin-marker
+ llama-ai-response-end-marker)))
+ (llama-ai-response-finished-callback)
+ (when callback
+ (funcall callback ai-response)))))))
buffer))
(defun llama-chat-send ()
@@ -199,14 +213,13 @@
(goto-char found)
(beginning-of-line))))
-(defun llama-chat ()
-  "Start a chat with the AI."
+(defun llama-chat (&optional callback)
+  "Start a chat with the AI.
+
+If CALLBACK is provided, it is called with the AI's initial
+response text once it has finished streaming."
(interactive)
(pop-to-buffer (llama-chat-buffer
- "*llama-chat*"
- (llm-make-chat-prompt
- "Briefly greet the user without mentioning your name and ask how you can help."
- :context "You are a helpful AI assistant running inside the Emacs text editor."))))
+ llama-chat-default-name
+ (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args)
+ :callback callback)))
(defun llama-doctor ()
  "Start a psychotherapy session with the AI."
@@ -231,5 +244,104 @@
"[[" #'llama-chat-previous-prompt
"]]" #'llama-chat-next-prompt))
+(cl-defun llama-send-string-to-chat (name string &key user-visible-string callback initial-prompt)
+ "Send STRING to the chat named NAME.
+
+If USER-VISIBLE-STRING is provided, display that in the chat buffer instead of the original string.
+If CALLBACK is provided, call it with the AI response when it's available.
+
+If a chat buffer with the given name isn't found, a new one is created using INITIAL-PROMPT, which defaults to one made using `llama-chat-default-initial-prompt-args'."
+ (cl-flet ((do-send (&rest _)
+ (with-current-buffer name
+ (delete-region llama-user-input-begin-marker llama-user-input-end-marker)
+ (save-excursion
+ (goto-char llama-user-input-begin-marker)
+ (insert (or user-visible-string string)))
+ (llm-chat-prompt-append-response llama-current-chat-prompt string)
+ (save-excursion
+ (let ((inhibit-read-only t))
+ (goto-char llama-user-input-end-marker)
+ (insert (propertize "\n\n" 'read-only t))
+ (set-marker llama-ai-response-begin-marker (point))))
+ (llama-chat-streaming-to-chat-buffer llama-llm-provider
+ llama-current-chat-prompt
+ (current-buffer)
+ llama-ai-response-begin-marker
+ (lambda ()
+ (let ((ai-response (buffer-substring-no-properties
+ llama-ai-response-begin-marker
+ llama-ai-response-end-marker)))
+ (llama-ai-response-finished-callback)
+ (when callback
+ (funcall callback ai-response))))))))
+ (if (get-buffer name)
+ (do-send)
+ (llama-chat-buffer name
+ (or initial-prompt
+ (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args))
+ :callback #'do-send))))
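+
+;; A minimal usage sketch (illustrative strings; assumes
+;; `llama-llm-provider' is configured):
+;;
+;;   (llama-send-string-to-chat
+;;    llama-chat-default-name
+;;    "What does `save-excursion' do?"
+;;    :callback (lambda (response) (message "AI: %s" response)))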
+
+(defun llama-ask-region (start end prompt &optional name)
+ "Ask the AI in buffer NAME the PROMPT about the region between START and END.
+
+NAME defaults to `llama-chat-default-name'."
+ (interactive (list (region-beginning)
+ (region-end)
+ (read-string "Prompt: ")
+ (if current-prefix-arg
+ (read-string "Chat buffer: ")
+ llama-chat-default-name)))
+ (let ((input (format "%s\n%s" (buffer-substring-no-properties start end) prompt)))
+ (llama-send-string-to-chat
+ (or name llama-chat-default-name)
+ input)
+ (display-buffer (or name llama-chat-default-name))))
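+
+;; For example, from Lisp with an active region (prompt text
+;; illustrative):
+;;
+;;   (llama-ask-region (region-beginning) (region-end)
+;;                     "Explain what this code does.")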
+
+(defun llama-ask-buffer (buffer prompt &optional name)
+ "Ask the AI in buffer NAME the PROMPT about the BUFFER (interactively, the current buffer).
+
+NAME defaults to `llama-chat-default-name'."
+ (interactive (list (current-buffer)
+ (read-string "Prompt: ")
+ (if current-prefix-arg
+ (read-string "Chat buffer: ")
+ llama-chat-default-name)))
+  (let ((input (format "%s\n%s"
+                       (with-current-buffer buffer
+                         (buffer-substring-no-properties (point-min) (point-max)))
+                       prompt)))
+ (llama-send-string-to-chat
+ (or name llama-chat-default-name)
+ input)
+ (display-buffer (or name llama-chat-default-name))))
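+
+;; For example, from Lisp (buffer name illustrative):
+;;
+;;   (llama-ask-buffer (get-buffer "*scratch*")
+;;                     "Summarize the contents of this buffer.")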
+
+(defun llama-replace-in-region (start end prompt &optional name)
+ "Replace the region between START and END with the AI's response to PROMPT (require confirmation)."
+ (interactive (list (region-beginning)
+ (region-end)
+ (read-string "Prompt: ")
+ (if current-prefix-arg
+ (read-string "Chat buffer: ")
+ llama-chat-default-name)))
+ (let ((buffer (current-buffer))
+ (input (format "Generate replacement text for the following INPUT given the PROMPT. In your response, delimit the suggested replacement with the markers and .
+INPUT: %s
+PROMPT: %s" (buffer-substring-no-properties start end) prompt)))
+ (llama-send-string-to-chat
+ (or name llama-chat-default-name)
+ input
+     :callback (lambda (response)
+                 (let ((replacement
+                        (with-temp-buffer
+                          (insert response)
+                          (goto-char (point-min))
+                          (let ((start (search-forward "<REPLACE>" nil t))
+                                (end (search-forward "</REPLACE>" nil t)))
+                            (when (and start end)
+                              ;; Point is after the closing marker; use
+                              ;; `match-beginning' to drop the marker itself.
+                              (buffer-substring-no-properties
+                               start (match-beginning 0)))))))
+                   (if replacement
+                       (when (y-or-n-p (format "Replace region with AI suggestion: %s? " replacement))
+                         (with-current-buffer buffer
+                           (delete-region start end)
+                           (insert replacement)))
+                     (message "AI did not generate a valid replacement.")))))
+ (display-buffer (or name llama-chat-default-name))))
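+
+;; For example, from Lisp with an active region (prompt text
+;; illustrative):
+;;
+;;   (llama-replace-in-region (region-beginning) (region-end)
+;;                            "Rewrite this paragraph more concisely.")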
+
(provide 'llama)
;;; llama.el ends here