Open a new Llama chat if you call llama-ask* without an existing chat
This commit is contained in:
parent 93bc223e5b
commit 78bc060018
@@ -80,8 +80,9 @@
         (insert text))))))
     (llm-chat-streaming provider prompt
                         (lambda (text) (insert-text text))
-                        (lambda (text) (insert-text text)
-                          (funcall finish-callback))
+                        (lambda (text)
+                          (insert-text text)
+                          (funcall finish-callback text))
                         (lambda (_ msg) (error "Error calling the LLM: %s" msg))))))))
 
 (defun llama-chat-eldoc-function (cb)
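For context, this hunk follows the callback contract of the llm library's `llm-chat-streaming', which takes a partial-response callback, a final-response callback, and an error callback; the final callback receives the complete text, which is what lets `finish-callback' now be handed TEXT. A minimal sketch of that contract (the provider and model name are illustrative; llm-ollama is one real backend):

(require 'llm)
(require 'llm-ollama)

;; Illustrative provider and prompt; any llm provider would do here.
(let ((provider (make-llm-ollama :chat-model "llama3"))
      (prompt (llm-make-chat-prompt "Hello!")))
  (llm-chat-streaming provider prompt
                      ;; partial callback: invoked repeatedly as text streams in
                      (lambda (text) (message "partial: %s" text))
                      ;; final callback: invoked once with the complete response
                      (lambda (text) (message "done: %s" text))
                      ;; error callback: invoked with an error symbol and message
                      (lambda (_type msg) (message "error: %s" msg))))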
@@ -126,28 +127,33 @@
 (defun llama-chat-buffer-name ()
   "*llama-chat*")
 
-(defun llama-ai-response-finished-callback ()
-  (put-text-property llama-ai-response-begin-marker
-                     llama-ai-response-end-marker
-                     'read-only t)
-  (let ((inhibit-read-only t))
-    (save-excursion
-      (goto-char (point-max))
-      (insert (propertize "\n\n" 'read-only t))
-      (insert (propertize llama-chat-prompt-symbol 'read-only t 'rear-nonsticky '(read-only)))
-      (set-marker llama-user-input-begin-marker (point))
-      (set-marker llama-user-input-end-marker llama-user-input-begin-marker)
-      (set-marker llama-ai-response-begin-marker (point))
-      (set-marker llama-ai-response-end-marker llama-ai-response-begin-marker)))
-  (goto-char llama-user-input-begin-marker))
+(cl-defun llama-ai-response-finished-callback (&key callback)
+  (lambda (text)
+    (put-text-property llama-ai-response-begin-marker
+                       llama-ai-response-end-marker
+                       'read-only t)
+    (let ((inhibit-read-only t))
+      (save-excursion
+        (goto-char (point-max))
+        (insert (propertize "\n\n" 'read-only t))
+        (insert (propertize llama-chat-prompt-symbol 'read-only t 'rear-nonsticky '(read-only)))
+        (set-marker llama-user-input-begin-marker (point))
+        (set-marker llama-user-input-end-marker llama-user-input-begin-marker)
+        (set-marker llama-ai-response-begin-marker (point))
+        (set-marker llama-ai-response-end-marker llama-ai-response-begin-marker)))
+    (goto-char llama-user-input-begin-marker)
+    (when callback (funcall callback text))))
 
-(cl-defun llama-chat-send-prompt (name prompt &key filter)
+(cl-defun llama-chat-send-prompt (name prompt &key filter callback)
   "Send the PROMPT to the chat buffer named NAME.
 
 If FILTER is provided, it should be a function that accepts the raw AI response
 and two callback arguments `insert' and `send'. In the filter function, call
 `insert' to insert text into the chat buffer or `send' to send additional text
-to the AI (e.g. to provide function call results)."
+to the AI (e.g. to provide function call results).
+
+If CALLBACK is provided, it will be called with the raw AI response text after
+it has been inserted into the chat buffer."
   (with-current-buffer name
     (if filter
         (cl-flet ((insert (text)
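The hunk above turns `llama-ai-response-finished-callback' from a plain function into a constructor that returns a closure over an optional CALLBACK, which later receives the final response text. A hedged sketch of how a caller might use the new keyword (the buffer name and `my-prompt' are illustrative, and the call must target a live llama-chat buffer):

;; Send a prompt and get notified once the response is fully inserted.
(llama-chat-send-prompt "*llama-chat*" my-prompt
                        :callback (lambda (text)
                                    (message "AI replied with %d chars" (length text))))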
@@ -155,7 +161,7 @@ to the AI (e.g. to provide function call results)."
                     (save-excursion
                       (goto-chat llama-ai-response-begin-marker)
                       (insert text)))
-                  (llama-ai-response-finished-callback))
+                  (funcall (llama-ai-response-finished-callback :callback callback) text))
                   (send (text)
                     (llm-chat-prompt-append-response prompt text)
                     (llama-chat-send-prompt name prompt :filter filter)))
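This is the FILTER protocol from the docstring in action: the filter receives the raw response plus the `insert' and `send' callbacks and decides which to invoke. A sketch of a conforming filter; the "FUNCALL:" prefix convention and the function name are invented purely for illustration:

(defun my/llama-filter (response insert send)
  "Insert RESPONSE, unless it requests a (hypothetical) function call."
  (if (string-prefix-p "FUNCALL:" response)
      ;; Hand a result back to the AI instead of showing the raw text.
      (funcall send "function result: 42")
    (funcall insert response)))

;; Illustrative call; `my-prompt' is assumed to be an llm chat prompt object.
(llama-chat-send-prompt "*llama-chat*" my-prompt :filter #'my/llama-filter)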
@@ -166,9 +172,9 @@ to the AI (e.g. to provide function call results)."
        prompt
        (current-buffer)
        llama-ai-response-begin-marker
-       #'llama-ai-response-finished-callback))))
+       (llama-ai-response-finished-callback :callback callback)))))
 
-(cl-defun llama-chat-buffer (name prompt &key provider filter)
+(cl-defun llama-chat-buffer (name prompt &key provider filter callback)
   (let ((buffer (get-buffer-create name)))
     (with-current-buffer buffer
       (unless (eq major-mode 'llama-chat-mode)
@@ -178,7 +184,7 @@ to the AI (e.g. to provide function call results)."
       (when filter
         (setq-local llama-current-chat-filter filter))
       (setq llama-current-chat-prompt prompt)
-      (llama-chat-send-prompt name prompt :filter filter)))
+      (llama-chat-send-prompt name prompt :filter filter :callback callback)))
     buffer))
 
 (defun llama-chat-send ()
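With CALLBACK threaded through `llama-chat-buffer' as above, a caller can now react to the first AI response in a freshly created chat. A sketch under those assumptions (buffer name, prompt text, and message are illustrative):

;; Open a chat and run a follow-up once the opening response is inserted.
(pop-to-buffer
 (llama-chat-buffer "*llama-chat*"
                    (llm-make-chat-prompt "Introduce yourself briefly.")
                    :callback (lambda (_text)
                                (message "Initial greeting inserted; chat ready"))))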
@@ -243,7 +249,7 @@ to the AI (e.g. to provide function call results)."
    llama-chat-default-name
    (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args))))
 
-(defun llama-doctor()
+(defun llama-doctor ()
   "Start a psycotherapy session with the AI."
   (interactive)
   (pop-to-buffer (llama-chat-buffer
@@ -271,18 +277,23 @@ to the AI (e.g. to provide function call results)."
 
 If USER-VISIBLE-STRING is provided, display that as the user input in the chat
 buffer instead of the original string."
-  (unless (get-buffer name)
-    (error "No chat buffer named %s" name))
-  (with-current-buffer name
-    (save-excursion
-      (let ((inhibit-read-only t))
-        (goto-char llama-user-input-begin-marker)
-        (insert (or user-visible-string string))
-        (goto-char llama-user-input-end-marker)
-        (insert (propertize "\n\n" 'read-only t))
-        (set-marker llama-ai-response-begin-marker (point))))
-    (llm-chat-prompt-append-response llama-current-chat-prompt string)
-    (llama-chat-send-prompt name llama-current-chat-prompt :filter llama-current-chat-filter)))
+  (cl-flet ((send (&rest _args)
+              (with-current-buffer name
+                (save-excursion
+                  (let ((inhibit-read-only t))
+                    (goto-char llama-user-input-begin-marker)
+                    (insert (or user-visible-string string))
+                    (goto-char llama-user-input-end-marker)
+                    (insert (propertize "\n\n" 'read-only t))
+                    (set-marker llama-ai-response-begin-marker (point))))
+                (llm-chat-prompt-append-response llama-current-chat-prompt string)
+                (llama-chat-send-prompt name llama-current-chat-prompt :filter llama-current-chat-filter))))
+    (if (get-buffer name)
+        (send)
+      (pop-to-buffer (llama-chat-buffer
+                      name
+                      (apply #'llm-make-chat-prompt llama-chat-default-initial-prompt-args)
+                      :callback #'send)))))
 
 (defun llama-ask-region (start end prompt &optional name)
   "Ask the AI in buffer NAME the PROMPT about the region between START and END.
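This final hunk is the commit's headline behavior: the sending function (its opening line sits above this hunk) wraps the old body in a local `send' via `cl-flet', sends immediately when the chat buffer already exists, and otherwise creates the buffer and passes `#'send' as :callback so the question goes out only after the AI's opening message has finished. A sketch of the user-facing effect, assuming the llama-ask-* commands route through this function as the commit title suggests:

;; Before: with no chat buffer open this signaled "No chat buffer named ...".
;; After: it pops up a fresh chat, waits for the greeting, then sends the
;; question. Prompt text here is illustrative.
(llama-ask-region (region-beginning) (region-end) "What does this code do?")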