Make llama chat nicer

Jeremy Dormitzer 2024-05-23 10:49:23 -04:00
parent 8ae6544709
commit e91b1d6480


@@ -31,6 +31,9 @@
(defvar llama-llm-provider nil
"The llm.el provider to use.")
(defvar llama-chat-prompt-symbol "> "
"The symbol used to indicate the user's input in the chat buffer.")
(defvar-local llama-current-chat-prompt nil
"Chat prompt object for the current buffer.")
@@ -70,6 +73,10 @@
(funcall finish-callback))
(lambda (_ msg) (error "Error calling the LLM: %s" msg))))))))
(defun llama-chat-eldoc-function (cb)
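"Eldoc documentation function for llama chat buffers.
When point is on a markdown link, call CB with the link's URL."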
(cond
((markdown-link-p) (funcall cb (markdown-link-url) :thing (nth 2 (markdown-link-at-pos (point)))))))
(defun llama-chat-mode-initialize ()
"Set up a new chat buffer."
(setq llama-user-input-begin-marker (make-marker)
@@ -81,6 +88,14 @@
(set-marker llama-ai-response-begin-marker (point-max))
(set-marker llama-ai-response-end-marker (point-max)))
(defun llama-chat-self-insert-advice (&rest _)
"Makes sure that the point is within the user input zone whenever typing input."
(when (and (eq major-mode 'llama-chat-mode)
(< (point) (marker-position llama-user-input-begin-marker)))
(goto-char llama-user-input-end-marker)))
(advice-add 'self-insert-command :before #'llama-chat-self-insert-advice)
(define-derived-mode llama-chat-mode text-mode "Llama"
"Major mode for chatting with the AI."
:interactive nil
@@ -93,6 +108,7 @@
(font-lock-syntactic-face-function . markdown-syntactic-face)
(font-lock-extra-managed-props
. (composition display invisible keymap help-echo mouse-face))))
(add-hook 'eldoc-documentation-functions #'llama-chat-eldoc-function nil t)
(llama-chat-mode-initialize))
(defun llama-chat-buffer-name ()
@@ -105,8 +121,8 @@
(let ((inhibit-read-only t))
(save-excursion
(goto-char (point-max))
(insert (propertize "\n\n>" 'read-only t))
(insert (propertize " " 'read-only t 'rear-nonsticky '(read-only)))
(insert (propertize "\n\n" 'read-only t))
(insert (propertize llama-chat-prompt-symbol 'read-only t 'rear-nonsticky '(read-only)))
(set-marker llama-user-input-begin-marker (point))
(set-marker llama-user-input-end-marker llama-user-input-begin-marker)
(set-marker llama-ai-response-begin-marker (point))
@@ -146,6 +162,43 @@
llama-ai-response-begin-marker
#'llama-ai-response-finished-callback))))
(defun llama-chat-follow-link ()
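"Follow the markdown link at point, if any."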
(interactive)
(cond
((markdown-link-p) (markdown-follow-link-at-point))))
(defun llama-chat-context-action ()
"Perform a contextual action in the chat buffer based on the point.
- follows the link at point
- submits the input if point is at the end of the buffer"
(interactive)
(cond
((markdown-link-p) (llama-chat-follow-link))
((and (>= (point) (marker-position llama-user-input-begin-marker))
(<= (point) (marker-position llama-user-input-end-marker)))
(llama-chat-send))))
(defun llama-chat-next-prompt ()
"Jump to the next prompt in the chat buffer."
(interactive)
(let ((found (save-excursion
(forward-line 1)
(search-forward-regexp (rx line-start (literal llama-chat-prompt-symbol)) nil t))))
(when found
(goto-char found)
(beginning-of-line))))
(defun llama-chat-previous-prompt ()
"Jump to the previous prompt in the chat buffer."
(interactive)
(let ((found (save-excursion
(forward-line -1)
(search-backward-regexp (rx line-start (literal llama-chat-prompt-symbol)) nil t))))
(when found
(goto-char found)
(beginning-of-line))))
(defun llama-chat ()
"Start a chat with the AI."
(interactive)
@@ -164,7 +217,19 @@
"Briefly greet the user without mentioning your name and ask how you can help."
:context "You are an empathetic therapist."))))
(define-key llama-chat-mode-map (kbd "RET") #'llama-chat-send)
(keymap-set llama-chat-mode-map "RET" #'llama-chat-context-action)
(keymap-set llama-chat-mode-map "S-<return>" #'newline)
(keymap-set llama-chat-mode-map "C-j" #'newline)
(keymap-set llama-chat-mode-map "C-c C-c" #'llama-chat-send)
(keymap-set llama-chat-mode-map "C-c C-n" #'llama-chat-next-prompt)
(keymap-set llama-chat-mode-map "C-c C-p" #'llama-chat-previous-prompt)
(keymap-set llama-chat-mode-map "M-n" #'markdown-next-link)
(keymap-set llama-chat-mode-map "M-p" #'markdown-previous-link)
(when (featurep 'evil)
(evil-define-key 'normal llama-chat-mode-map
(kbd "RET") #'llama-chat-follow-link
"[[" #'llama-chat-previous-prompt
"]]" #'llama-chat-next-prompt))
(provide 'llama)
;;; llama.el ends here
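
For reference, a minimal usage sketch (not part of this commit) of how the option and commands introduced above could be wired up from a user's init file; the ">> " value and the C-<return> binding are illustrative assumptions, not defaults from llama.el:

;; Illustrative user configuration (assumed, not from the commit):
;; change the prompt marker added above and add an extra binding for
;; sending input. Assumes llama.el is installed and loadable.
(with-eval-after-load 'llama
  (setq llama-chat-prompt-symbol ">> ")  ; defvar above defaults to "> "
  (keymap-set llama-chat-mode-map "C-<return>" #'llama-chat-send))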