Add spinner indicating when the AI is responding, and inhibit input during that time

Jeremy Dormitzer 2024-07-02 14:40:46 -04:00
parent 211904a190
commit 11755d6201
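
The change follows a common Emacs pattern for long-running asynchronous work: set a buffer-local "busy" flag, start a mode-line spinner from spinner.el, and clear both in every callback (success and error) so the indicator cannot get stuck. A minimal sketch of that pattern, independent of this package (the names my/waiting-p and my/start-request are illustrative, not part of the commit):

(require 'spinner)

(defvar-local my/waiting-p nil
  "Non-nil while an asynchronous request for this buffer is in flight.")

(defun my/start-request (do-request)
  "Call DO-REQUEST with a finish callback, spinning until it fires."
  (setq my/waiting-p t)
  (spinner-start 'progress-bar) ; buffer-local mode-line spinner
  (funcall do-request
           (lambda ()
             ;; Runs when the request finishes: stop the spinner, clear the flag.
             (spinner-stop)
             (setq my/waiting-p nil))))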


@@ -3,7 +3,7 @@
;; Copyright (C) 2024 Jeremy Isaac Dormitzer
;; Author: Jeremy Isaac Dormitzer <jeremy.dormitzer@gmail.com>
-;; Package-Requires: ((emacs "28.1") (llm "0.15") (markdown-mode "2.7") (s "1.13"))
+;; Package-Requires: ((emacs "28.1") (llm "0.15") (markdown-mode "2.7") (s "1.13") (spinner "1.7.4"))
;; This program is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
@@ -25,6 +25,7 @@
;;; Code:
(require 'llm)
(require 's)
+(require 'spinner)
;; for font-locking
(require 'markdown-mode)
@@ -42,6 +43,9 @@
:context "You are a helpful AI assistant running inside the Emacs text editor.")
"The arguments for the default initial chat prompt.")
+(defvar llama-spinner-type 'progress-bar
+"The mode-line spinner type from spinner.el to use when waiting for the AI.")
(defvar-local llama-current-chat-prompt nil
"Chat prompt object for the current buffer.")
@@ -60,6 +64,9 @@
(defvar-local llama-ai-response-end-marker nil
"Marker for the AI's response.")
+(defvar-local llama-waiting-for-ai nil
+"True if we are waiting for the AI to respond.")
(defun llama-chat-streaming-to-chat-buffer (provider prompt buffer point finish-callback)
"A version of `llm-chat-streaming-to-point' that sets inhibit-read-only to t in the insertion callback."
(with-current-buffer buffer
@@ -70,6 +77,8 @@
(set-marker end point)
(set-marker-insertion-type start nil)
(set-marker-insertion-type end t)
+(setq llama-waiting-for-ai t)
+(spinner-start llama-spinner-type)
(cl-flet ((insert-text (text)
;; Erase and insert the new text between the marker cons.
(with-current-buffer (marker-buffer start)
@@ -81,9 +90,14 @@
(llm-chat-streaming provider prompt
(lambda (text) (insert-text text))
(lambda (text)
+(spinner-stop)
(insert-text text)
-(funcall finish-callback text))
-(lambda (_ msg) (error "Error calling the LLM: %s" msg))))))))
+(funcall finish-callback text)
+(setq llama-waiting-for-ai nil))
+(lambda (_ msg)
+(spinner-stop)
+(setq llama-waiting-for-ai nil)
+(error "Error calling the LLM: %s" msg))))))))
(defun llama-chat-eldoc-function (cb)
(cond
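
Putting the two hunks above together, the tail of llama-chat-streaming-to-chat-buffer after this change reads roughly as follows (reconstructed from the diff; indentation approximate, and the elided insert-text body is unchanged by this commit):

(setq llama-waiting-for-ai t)
(spinner-start llama-spinner-type)
(cl-flet ((insert-text (text) ...))
  (llm-chat-streaming provider prompt
                      (lambda (text) (insert-text text))
                      (lambda (text)
                        (spinner-stop)
                        (insert-text text)
                        (funcall finish-callback text)
                        (setq llama-waiting-for-ai nil))
                      (lambda (_ msg)
                        (spinner-stop)
                        (setq llama-waiting-for-ai nil)
                        (error "Error calling the LLM: %s" msg))))

Note that spinner-stop and the flag reset appear in both the completion callback and the error callback, so a failed LLM call does not leave the buffer stuck in the waiting state.
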
@@ -220,7 +234,8 @@ it has been inserted into the chat buffer.
(cond
((markdown-link-p) (llama-chat-follow-link))
((and (>= (point) (marker-position llama-user-input-begin-marker))
-(<= (point) (marker-position llama-user-input-end-marker)))
+(<= (point) (marker-position llama-user-input-end-marker))
+(not llama-waiting-for-ai))
(llama-chat-send))))
(defun llama-chat-next-prompt ()
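
With the extra condition in place, the dispatching command (presumably bound to RET in the chat buffer, given the commit message about inhibiting input) only calls llama-chat-send when point is inside the user-input region and no request is pending; reconstructed from the hunk above, with the enclosing command definition not shown in this view:

(cond
 ((markdown-link-p) (llama-chat-follow-link))
 ((and (>= (point) (marker-position llama-user-input-begin-marker))
       (<= (point) (marker-position llama-user-input-end-marker))
       (not llama-waiting-for-ai))
  (llama-chat-send)))
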
@@ -360,9 +375,12 @@ Do not include any additonal notes or commentary outside of the explanation sect
+ 1931
He was one of the most prominent football players of his time.
Wood was elected to the College Football Hall of Fame in 1980.")))))
+(spinner-start llama-spinner-type)
(llm-chat-async llama-llm-provider
llm-prompt
(lambda (response)
+(with-current-buffer buffer
+(spinner-stop))
(with-temp-buffer
(insert response)
(goto-char (point-min))
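
The non-streaming path gets the same treatment: spinner-start runs before llm-chat-async, and the response callback switches back to the chat buffer with with-current-buffer before calling spinner-stop, since an async callback makes no guarantee about which buffer is current when it fires. Reconstructed from the hunk above (the remainder of the callback and the error handler are truncated in this view):

(spinner-start llama-spinner-type)
(llm-chat-async llama-llm-provider
                llm-prompt
                (lambda (response)
                  (with-current-buffer buffer
                    (spinner-stop))
                  (with-temp-buffer
                    (insert response)
                    (goto-char (point-min))
                    ...))
                ...)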