From 8db9cc7e8fc07ce0509651e3162cc35ca0b304b1 Mon Sep 17 00:00:00 2001
From: Jeremy Dormitzer
Date: Thu, 23 May 2024 22:59:43 -0400
Subject: [PATCH] Make a version of llama-replace-in-region that does not have
 context

---
 emacs/.emacs.d/packages/llama/llama.el | 65 +++++++++++++++++++++++---
 1 file changed, 59 insertions(+), 6 deletions(-)

diff --git a/emacs/.emacs.d/packages/llama/llama.el b/emacs/.emacs.d/packages/llama/llama.el
index 9eb8db6..2615d15 100644
--- a/emacs/.emacs.d/packages/llama/llama.el
+++ b/emacs/.emacs.d/packages/llama/llama.el
@@ -354,20 +354,73 @@ NAME defaults to `llama-chat-default-name'."
    input)
   (display-buffer (or name llama-chat-default-name))))
 
+(defun llama-replace-in-region-with-context (start end prompt &optional name)
+  "Replace the region between START and END with the AI's response to PROMPT (require confirmation).
+
+Includes the surrounding buffer context in the prompt."
+  (interactive (list (region-beginning)
+                     (region-end)
+                     (read-string "Prompt: ")
+                     (if current-prefix-arg
+                         (completing-read "Chat buffer: " (mapcar (lambda (b) (buffer-name b))
+                                                                  (match-buffers (lambda (b)
+                                                                                   (with-current-buffer b
+                                                                                     (eq major-mode 'llama-chat-mode))))))
+                       "*llama-replace*")))
+  (let* ((buffer (current-buffer))
+         (context-begin (save-excursion
+                          (goto-char start)
+                          (vertical-motion -15)
+                          (point)))
+         (context-end (save-excursion
+                        (goto-char start)
+                        (vertical-motion 15)
+                        (point)))
+         (context (buffer-substring-no-properties context-begin context-end))
+         (input (format "Generate replacement text for the following INPUT given the PROMPT and the surrounding CONTEXT. In your response, delimit the suggested replacement with the markers  and .
+CONTEXT: %s
+INPUT: %s
+PROMPT: %s"
+                        context
+                        (buffer-substring-no-properties start end)
+                        prompt)))
+    (llama-send-string-to-chat
+     (or name "*llama-replace*")
+     input
+     :callback (lambda (response)
+                 (let ((replacement (with-temp-buffer
+                                      (insert response)
+                                      (goto-char (point-min))
+                                      (let ((start (search-forward "" nil t))
+                                            (end (search-forward "" nil t)))
+                                        (when (and start end)
+                                          (buffer-substring-no-properties start (- end 10)))))))
+                   (if replacement
+                       (when (y-or-n-p (format "Replace region with AI suggestion? %s" replacement))
+                         (with-current-buffer buffer
+                           (delete-region start end)
+                           (insert replacement)))
+                     (message "AI did not generate a valid replacement.")))))))
+
 (defun llama-replace-in-region (start end prompt &optional name)
   "Replace the region between START and END with the AI's response to PROMPT (require confirmation)."
   (interactive (list (region-beginning)
                      (region-end)
                      (read-string "Prompt: ")
                      (if current-prefix-arg
-                         (read-string "Chat buffer: ")
-                       llama-chat-default-name)))
-  (let ((buffer (current-buffer))
-        (input (format "Generate replacement text for the following INPUT given the PROMPT. In your response, delimit the suggested replacement with the markers  and .
+                         (completing-read "Chat buffer: " (mapcar (lambda (b) (buffer-name b))
+                                                                  (match-buffers (lambda (b)
+                                                                                   (with-current-buffer b
+                                                                                     (eq major-mode 'llama-chat-mode))))))
+                       "*llama-replace*")))
+  (let* ((buffer (current-buffer))
+         (input (format "Generate replacement text for the following INPUT given the PROMPT. In your response, delimit the suggested replacement with the markers  and .
 INPUT: %s
-PROMPT: %s" (buffer-substring-no-properties start end) prompt)))
+PROMPT: %s"
+                        (buffer-substring-no-properties start end)
+                        prompt)))
     (llama-send-string-to-chat
-     (or name llama-chat-default-name)
+     (or name "*llama-replace*")
      input
      :callback (lambda (response)
                  (let ((replacement (with-temp-buffer
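
Usage sketch (appended after the patch; not part of the diff): both commands are defined by the change above, but the `llama' feature name and the key bindings below are illustrative assumptions, not something the patch specifies.

;; Hypothetical bindings for the two interactive commands from the patch.
(with-eval-after-load 'llama
  ;; Prompt-only rewrite of the active region, confirmed via `y-or-n-p'.
  (global-set-key (kbd "C-c l r") #'llama-replace-in-region)
  ;; Same, but also sends roughly 15 lines of buffer text above and below
  ;; the region (collected with `vertical-motion') as CONTEXT.
  (global-set-key (kbd "C-c l R") #'llama-replace-in-region-with-context))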