;; -*- lexical-binding: t; -*-

(defvar ai-map (make-sparse-keymap)
  "Keymap for AI-related commands.")
(define-key leader-map "a" (cons "ai" ai-map))
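
;; gptel is the main LLM client.  The block below defines OpenAI and Ollama
;; backends, extra system directives, and a few helper commands.  Keybindings
;; use the `:general' keyword from general.el, and `leader-map' is assumed to
;; be defined elsewhere in this configuration.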
(use-package gptel
  :commands (gptel-request)
  :config
  (defvar gptel-backend-openai (gptel-make-openai "ChatGPT"
                                 :key #'gptel-api-key-from-auth-source
                                 :stream t
                                 :models '("gpt-4o"
                                           "gpt-4o-mini")))
  (defvar gptel-backend-ollama (gptel-make-ollama "Ollama"
                                 :stream t
                                 :models '("llama3.1:latest"
                                           "mistral-nemo:latest"
                                           "gemma2:2b")))
  (setq gptel-backend gptel-backend-openai
        gptel-model "gpt-4o")
  (defun gptel-select-backend (backend)
    "Interactively select BACKEND and model from all known gptel backends."
    (interactive (list (cl-loop
                        for (name . backend) in gptel--known-backends
                        nconc (cl-loop for model in (gptel-backend-models backend)
                                       collect (list (concat name ":" model) backend model))
                        into models-alist finally return
                        (cdr (assoc (completing-read "Backend: " models-alist nil t)
                                    models-alist)))))
    (setq gptel-backend (car backend)
          gptel-model (cadr backend)))
  (add-to-list 'gptel-directives '(shell-command . "You are a command line helper. Generate shell commands that do what is requested, without any additional description or explanation. Reply in plain text with no Markdown or other syntax. Reply with the command only."))
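  ;; Directives can be selected from `gptel-menu' or used programmatically.
  ;; A minimal sketch (illustrative only, not part of this config):
  ;;
  ;;   (gptel-request "list files modified in the last 24 hours"
  ;;     :system (alist-get 'shell-command gptel-directives)
  ;;     :callback (lambda (response _info) (message "%s" response)))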
  (add-to-list 'gptel-directives '(org-mode . "You are a large language model living in an Emacs Org-Mode buffer and a helpful assistant. You may evaluate Emacs Lisp, Python, and shell-script code when necessary by outputting an Org-Mode source block. You don't need to ask for confirmation before evaluating code. The user will execute the source block and display the results in the buffer. Respond concisely.

Some notes on code evaluation:

Source code blocks should have the following format:
#+begin_src <lang>
<code>
#+end_src

where <lang> is the language of the code block, e.g., emacs-lisp, python, or sh.

Do not return the results of the source block - the user will evaluate the code and display the results in the buffer.

You can use Emacs Lisp code blocks to evaluate code in the Emacs process you are running in, for example to open files for the user. For Emacs Lisp code blocks, the return value will be whatever the last expression in the block evaluates to, e.g.:

#+begin_src emacs-lisp
(+ 1 2)
#+end_src

#+RESULTS:
: 3

For shell-script code blocks, the return value will be the output of the script, e.g.:

#+begin_src sh
echo foo
echo bar
#+end_src

#+RESULTS:
| foo |
| bar |

For Python code blocks, you only have access to the Python standard library, and cannot use any third-party libraries. Additionally, the return value needs to be explicitly returned using the return keyword, e.g.:

#+begin_src python
return 1 + 2
#+end_src

#+RESULTS:
: 3

Here are some examples of your task:

User: What's the current date and time?

Assistant:
#+begin_src emacs-lisp
(format-time-string \"%Y-%m-%d %H:%M:%S\")
#+end_src

User:
#+RESULTS:
: 2024-08-07 15:26:55

User: Can you find the square root of 144 in Python?

Assistant:
#+begin_src python
import math
return math.sqrt(144)
#+end_src

User:
#+RESULTS:
: 12.0

User: List all files in the current directory.

Assistant:
#+begin_src sh
ls
#+end_src

User:
#+RESULTS:
| Dockerfile |
| Gemfile |
| Gemfile.lock |
| README.md |
| Rakefile |
| app |
| bin |
| db |
| demo |
| docker |
| docs |
| lib |

User: What is the capital of France?

Assistant: The capital of France is Paris.

User:
Convert 68 degrees F to C

Assistant:
#+begin_src python
def fahrenheit_to_celsius(f):
    return (f - 32) * 5.0/9.0

return fahrenheit_to_celsius(68)
#+end_src

User:
#+RESULTS:
: 20.0

User: How do I search for a string in Emacs?

Assistant: You can search for a string in Emacs by using =C-s= (Control + s) to start an incremental search. As you type the string you want to search, Emacs will highlight matches in real-time. To find the next occurrence, press =C-s= again. If you want to search backwards, use =C-r= (Control + r)."))
  (add-to-list 'gptel-directives '(stable-diffusion . "You are an AI assistant specialized in creating precise and detailed prompts for stable diffusion image generators. When given a natural language input describing a desired image, you will generate a clear, concise, and highly descriptive prompt that includes key elements such as subjects, actions, environments, styles, lighting, and other relevant details to ensure high-quality image generation.

Example input: \"A fantasy landscape with a castle and dragons\"
Example output: \"A majestic medieval castle perched on a hilltop, surrounded by flying dragons, under a moonlit sky, with a serene forest in the background. Fantasy art style, detailed architecture, and vibrant colors.\""))
  (add-to-list 'gptel-directives '(code-review . "You are a code reviewer. Provide feedback on the code snippet below. Highlight any issues, suggest improvements, and provide explanations for your suggestions. Respond in plain text with no Markdown or other syntax."))
  (add-to-list 'gptel-directives '(prompt-generator . "
You are an advanced language model designed to generate effective, clear, and contextually appropriate prompts for other language models. Your goals are to:

1. Understand the specific use case or goal provided.
2. Generate prompts that are clear, specific, and actionable.
3. Ensure that prompts are open-ended enough to allow for creativity, yet focused enough to produce relevant responses.
4. Maintain a formal and professional tone unless instructed otherwise.
5. Tailor prompts to maximize the potential and unique capabilities of the language models they are intended for.

Here’s an example of your task:

User: Write a prompt to write a creative short story involving a dragon.
Assistant: Write a short story about a dragon who discovers a hidden talent that surprises everyone in the dragon kingdom. Describe the dragon's journey and the reactions of those around it."))
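  ;; `gptel-org' is a thin wrapper around `gptel' that always creates the chat
  ;; buffer in `org-mode' and installs the org-mode directive above as the
  ;; buffer-local system message, so replies arrive as Org source blocks the
  ;; user can evaluate in place.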
  (defun gptel-org (buffer &optional _ initial interactivep)
    "Like `gptel', but create BUFFER in `org-mode' with the org-mode directive.
INITIAL and INTERACTIVEP are passed on to `gptel'."
    (interactive (let* ((backend (default-value 'gptel-backend))
                        (backend-name
                         (format "*%s*" (gptel-backend-name backend))))
                   (list (read-buffer "Create or choose gptel buffer: "
                                      (generate-new-buffer-name backend-name) nil ; DEFAULT and REQUIRE-MATCH
                                      (lambda (b) ; PREDICATE
                                        (let ((buffer (get-buffer (or (car-safe b) b))))
                                          (and
                                           (with-current-buffer buffer (eq major-mode 'org-mode))
                                           (buffer-local-value 'gptel-mode buffer)))))
                         (condition-case nil
                             (gptel--get-api-key
                              (gptel-backend-key backend))
                           ((error user-error)
                            (setq gptel-api-key
                                  (read-passwd
                                   (format "%s API key: " backend-name)))))
                         (and (use-region-p)
                              (buffer-substring (region-beginning)
                                                (region-end)))
                         t)))
    (let ((gptel-default-mode #'org-mode))
      (gptel buffer nil initial interactivep))
    (with-current-buffer buffer
      (setq-local gptel--system-message (alist-get 'org-mode gptel-directives))))
  (defun gptel-chat-with-buffer (&optional arg interactivep)
    "Start a gptel chat seeded with the contents of the current buffer.
With prefix ARG, always create a fresh chat buffer.  INTERACTIVEP is
non-nil when called interactively."
    (interactive (list current-prefix-arg t))
    (let* ((name (format "*gptel: %s*" (buffer-name)))
           (buffer (if arg
                       (generate-new-buffer name)
                     name)))
      (gptel buffer nil (buffer-string) interactivep)
      (with-current-buffer buffer
        (goto-char (point-max))
        (newline)
        (insert (gptel-prompt-prefix-string)))))
  :general
  ("C-c RET" #'gptel-send
   "C-c C-<return>" #'gptel-menu)
  (ai-map
   "g" #'gptel
   "o" #'gptel-org
   "s" #'gptel-send
   "m" #'gptel-menu
   "b" #'gptel-chat-with-buffer
   "B" #'gptel-select-backend
   "a" #'gptel-context-add
   "f" #'gptel-context-add-file
   "k" #'gptel-abort))
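
;; gptel-quick pops up a short LLM-generated summary or explanation of the
;; thing at point.  Binding it in `embark-general-map' also makes it available
;; as an Embark action on any target.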
(use-package gptel-quick
  :straight (:type git :host github :repo "karthink/gptel-quick")
  :commands gptel-quick
  :general
  (embark-general-map
   "?" #'gptel-quick)
  (ai-map
   "?" #'gptel-quick))
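
;; aimenu is a local package (packages/aimenu in `user-emacs-directory'),
;; configured here to use the local Ollama backend with the small gemma2:2b
;; model.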
(use-package aimenu
  :straight `(:local-repo ,(expand-file-name "packages/aimenu" user-emacs-directory))
  :defer t
  :config
  (setq aimenu-backend gptel-backend-ollama
        aimenu-model "gemma2:2b")
  :general
  (ai-map "i" #'aimenu))
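
;; `gptel-commit-message' pulls the diff from the magit-diff buffer shown
;; alongside the commit buffer and asks the model for a short summary line.
;; With no callback supplied, gptel-request should stream the reply into the
;; commit buffer at point.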
(defun gptel-commit-message ()
  "Generate a commit message via gptel."
  (interactive)
  (unless git-commit-mode
    (user-error "Not in a git commit buffer!"))
  (let* ((diff-buf (magit-get-mode-buffer 'magit-diff-mode))
         (diff (with-current-buffer diff-buf
                 (buffer-substring-no-properties
                  (point-min)
                  ;; Skip the last line, which is just the [back] button.
                  (save-excursion
                    (goto-char (point-max))
                    (forward-line -1)
                    (point)))))
         (prompt (format "%s\n\nWrite a clear, concise commit message for this diff. The first line should succinctly summarize the changes made and should be no more than 50 characters. If additional context is needed, include it in an additional paragraph separated by a blank line from the first line. Do not use the word 'enhance' or talk about the user experience. Reply in plain text with no Markdown or other syntax. Reply with the commit message only." diff)))
    (message "Generating commit message...")
    (gptel-request prompt
      :stream t
      :system "You are a professional software engineer.")))

(with-eval-after-load 'git-commit
  (keymap-set git-commit-mode-map "C-c RET" #'gptel-commit-message))
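
;; ComfyUI: run the Stable Diffusion web UI as an Emacs subprocess and open
;; its URL in a browser.  The path and launch command below reflect a local
;; pipenv-based checkout in ~/ComfyUI; adjust them to match your installation.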
(defvar comfy-ui-path (expand-file-name "~/ComfyUI")
  "Path to ComfyUI source repository.")

(defvar comfy-ui-command (list "pipenv" "run" "python" "main.py")
  "Command to run the ComfyUI server.")

(defvar-local comfy-ui--url nil
  "URL for this buffer's ComfyUI process.")

(defun comfy-ui-process-filter (proc string)
  "Process filter for the ComfyUI process PROC.
Watch STRING for the server URL, open it in a browser, and append the
output to the process buffer."
  (when-let ((match (s-match "To see the GUI go to: \\(.*\\)" string)))
    (with-current-buffer (process-buffer proc)
      (setq comfy-ui--url (nth 1 match))
      (let ((browse-url-browser-function #'browse-url-default-browser))
        (browse-url comfy-ui--url))))
  (when (buffer-live-p (process-buffer proc))
    (with-current-buffer (process-buffer proc)
      (let ((moving (= (point) (process-mark proc))))
        (save-excursion
          ;; Insert the text, advancing the process marker.
          (goto-char (process-mark proc))
          (insert string)
          (set-marker (process-mark proc) (point)))
        (if moving (goto-char (process-mark proc)))))))

(defun comfy-ui ()
  "Launch ComfyUI in a subprocess and open the web UI."
  (interactive)
  (unless (file-exists-p (expand-file-name (f-join comfy-ui-path "main.py")))
    (user-error "Could not find ComfyUI installation!"))
  (if-let ((proc (get-process "comfy-ui")))
      (with-current-buffer (process-buffer proc)
        (browse-url comfy-ui--url))
    (with-temp-buffer
      (cd comfy-ui-path)
      (make-process :name "comfy-ui"
                    :buffer "*ComfyUI*"
                    :command comfy-ui-command
                    :filter #'comfy-ui-process-filter))))
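
;; ollama-copilot is an external proxy (the ollama-copilot binary must be on
;; PATH) that serves Copilot-style completions from a local Ollama model.
;; Toggling `ollama-copilot-mode' points copilot.el at the proxy and restores
;; the previous proxy setting when disabled.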
(defvar ollama-copilot-proxy-port 11435
  "Port for the Ollama Copilot proxy server.")

(defvar ollama-copilot-model "codellama:code"
  "Model for the Ollama Copilot proxy server.")

(defun ollama-copilot-ensure ()
  "Start the Ollama Copilot proxy server if it's not already running."
  (let ((proc-name "ollama-copilot"))
    (unless (get-process proc-name)
      (unless (executable-find "ollama-copilot")
        (user-error "Could not find ollama-copilot executable!"))
      (make-process :name proc-name
                    :buffer (format "*%s*" proc-name)
                    :command `("ollama-copilot"
                               "-proxy-port" ,(format ":%s" ollama-copilot-proxy-port)
                               "-model" ,ollama-copilot-model)))))

(defvar ollama-copilot--proxy-cache nil
  "Internal variable to cache the old proxy value.")

(define-minor-mode ollama-copilot-mode
  "Minor mode to use ollama-copilot as a local Copilot proxy."
  :global t
  (require 'copilot)
  (if ollama-copilot-mode
      (progn
        (ollama-copilot-ensure)
        (setq ollama-copilot--proxy-cache copilot-network-proxy)
        (setq copilot-network-proxy `(:host "127.0.0.1"
                                      :port ,ollama-copilot-proxy-port
                                      :rejectUnauthorized :json-false))
        (copilot-diagnose))
    (setq copilot-network-proxy ollama-copilot--proxy-cache)
    (copilot-diagnose)))

(provide 'init-ai)