Make a version of llama-replace-in-region that does not have context
parent 6191654511
commit 8db9cc7e8f
@@ -354,20 +354,73 @@ NAME defaults to `llama-chat-default-name'.
      input)
     (display-buffer (or name llama-chat-default-name))))
 
+(defun llama-replace-in-region-with-context (start end prompt &optional name)
+  "Replace the region between START and END with the AI's response to PROMPT (require confirmation).
+
+Includes the surrounding buffer context in the prompt."
+  (interactive (list (region-beginning)
+                     (region-end)
+                     (read-string "Prompt: ")
+                     (if current-prefix-arg
+                         (completing-read "Chat buffer: " (mapcar (lambda (b) (buffer-name b))
+                                                                  (match-buffers (lambda (b)
+                                                                                   (with-current-buffer b
+                                                                                     (eq major-mode 'llama-chat-mode))))))
+                       "*llama-replace*")))
+  (let* ((buffer (current-buffer))
+         (context-begin (save-excursion
+                          (goto-char start)
+                          (vertical-motion -15)
+                          (point)))
+         (context-end (save-excursion
+                        (goto-char start)
+                        (vertical-motion 15)
+                        (point)))
+         (context (buffer-substring-no-properties context-begin context-end))
+         (input (format "Generate replacement text for the following INPUT given the PROMPT and the surrounding CONTEXT. In your response, delimit the suggested replacement with the markers <REPLACE> and </REPLACE>.
+CONTEXT: %s
+INPUT: %s
+PROMPT: %s"
+                        context
+                        (buffer-substring-no-properties start end)
+                        prompt)))
+    (llama-send-string-to-chat
+     (or name "*llama-replace*")
+     input
+     :callback (lambda (response)
+                 (let ((replacement (with-temp-buffer
+                                      (insert response)
+                                      (goto-char (point-min))
+                                      (let ((start (search-forward "<REPLACE>" nil t))
+                                            (end (search-forward "</REPLACE>" nil t)))
+                                        (when (and start end)
+                                          (buffer-substring-no-properties start (- end 10)))))))
+                   (if replacement
+                       (when (y-or-n-p (format "Replace region with AI suggestion? %s" replacement))
+                         (with-current-buffer buffer
+                           (delete-region start end)
+                           (insert replacement)))
+                     (message "AI did not generate a valid replacement.")))))))
+
 (defun llama-replace-in-region (start end prompt &optional name)
   "Replace the region between START and END with the AI's response to PROMPT (require confirmation)."
   (interactive (list (region-beginning)
                      (region-end)
                      (read-string "Prompt: ")
                      (if current-prefix-arg
-                         (read-string "Chat buffer: ")
-                       llama-chat-default-name)))
-  (let ((buffer (current-buffer))
-        (input (format "Generate replacement text for the following INPUT given the PROMPT. In your response, delimit the suggested replacement with the markers <REPLACE> and </REPLACE>.
+                         (completing-read "Chat buffer: " (mapcar (lambda (b) (buffer-name b))
+                                                                  (match-buffers (lambda (b)
+                                                                                   (with-current-buffer b
+                                                                                     (eq major-mode 'llama-chat-mode))))))
+                       "*llama-replace*")))
+  (let* ((buffer (current-buffer))
+         (input (format "Generate replacement text for the following INPUT given the PROMPT. In your response, delimit the suggested replacement with the markers <REPLACE> and </REPLACE>.
 INPUT: %s
-PROMPT: %s" (buffer-substring-no-properties start end) prompt)))
+PROMPT: %s"
+                        (buffer-substring-no-properties start end)
+                        prompt)))
     (llama-send-string-to-chat
-     (or name llama-chat-default-name)
+     (or name "*llama-replace*")
      input
      :callback (lambda (response)
                  (let ((replacement (with-temp-buffer
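
For reference, a minimal usage sketch of the two commands touched by this commit. It is an illustration only: the `(require 'llama)` feature name and the keybindings are assumptions for demonstration, not something established by this diff.

;; Illustrative sketch only -- the feature name and keybindings are assumed,
;; not part of this commit.
(require 'llama)

;; Select a region, then M-x llama-replace-in-region and enter a prompt.
;; The region and prompt are sent to the "*llama-replace*" chat buffer, and
;; the <REPLACE>...</REPLACE> span from the response replaces the region
;; after a y-or-n-p confirmation.
(global-set-key (kbd "C-c l r") #'llama-replace-in-region)

;; Same flow, but the prompt also includes roughly 15 lines of buffer
;; context above and below the region (see the `vertical-motion' calls
;; in the diff above).
(global-set-key (kbd "C-c l R") #'llama-replace-in-region-with-context)

With a prefix argument, both commands instead offer a completing-read over the existing llama-chat-mode buffers to choose which chat buffer receives the request.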