Add comfy-ui command

Jeremy Dormitzer 2024-06-05 13:13:57 -04:00
parent f1698e3247
commit 5169285be0
3 changed files with 51 additions and 12 deletions


@@ -0,0 +1,50 @@
;; -*- lexical-binding: t; -*-
(use-package llama
  :straight `(:local-repo ,(expand-file-name "packages/llama" user-emacs-directory) :type nil)
  :load-path "packages/llama"
  :config
  (require 'llm-ollama)
  (setq llama-llm-provider (make-llm-ollama :chat-model "llama3:latest")
        llm-warn-on-nonfree nil))
;; `s-match' and `f-join' below come from the s and f libraries, which
;; this config assumes are installed.
(require 's)
(require 'f)

(defvar comfy-ui-path (expand-file-name "~/ComfyUI")
  "Path to the ComfyUI source repository.")

(defvar comfy-ui-command (list "pipenv" "run" "python" "main.py")
  "Command to run the ComfyUI server.")

(defvar-local comfy-ui--url nil
  "URL for this buffer's ComfyUI process.")
(defun comfy-ui-process-filter (proc string)
  (when-let ((match (s-match "To see the GUI go to: \\(.*\\)" string)))
    (with-current-buffer (process-buffer proc)
      (setq comfy-ui--url (nth 1 match))
      (browse-url comfy-ui--url)))
  (when (buffer-live-p (process-buffer proc))
    (with-current-buffer (process-buffer proc)
      (let ((moving (= (point) (process-mark proc))))
        (save-excursion
          ;; Insert the text, advancing the process marker.
          (goto-char (process-mark proc))
          (insert string)
          (set-marker (process-mark proc) (point)))
        (if moving (goto-char (process-mark proc)))))))
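
;; Entry point: reuse an already-running ComfyUI process if there is one,
;; otherwise start the server from `comfy-ui-path' with `comfy-ui-command'.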
(defun comfy-ui ()
  "Launch ComfyUI in a subprocess and open the web UI."
  (interactive)
  (unless (file-exists-p (expand-file-name (f-join comfy-ui-path "main.py")))
    (user-error "Could not find ComfyUI installation!"))
  (if-let ((proc (get-process "comfy-ui")))
      (with-current-buffer (process-buffer proc)
        (browse-url comfy-ui--url))
    (with-temp-buffer
      (cd comfy-ui-path)
      (make-process :name "comfy-ui"
                    :buffer "*ComfyUI*"
                    :command comfy-ui-command
                    :filter #'comfy-ui-process-filter))))
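
;; Usage: M-x comfy-ui starts the server in the *ComfyUI* buffer and opens
;; the web UI; calling it again while the server is running just re-opens
;; the saved URL.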
(provide 'init-ai)


@@ -1,11 +0,0 @@
;; -*- lexical-binding: t; -*-
(use-package llama
  :straight `(:local-repo ,(expand-file-name "packages/llama" user-emacs-directory) :type nil)
  :load-path "packages/llama"
  :config
  (require 'llm-ollama)
  (setq llama-llm-provider (make-llm-ollama :chat-model "llama3:latest")
        llm-warn-on-nonfree nil))
(provide 'init-llm)


@@ -131,7 +131,7 @@
(require 'init-games)
(require 'handwriting)
(require 'init-navi)
(require 'init-llm)
(require 'init-ai)
(when (string-equal system-type "darwin")
  (require 'init-mac))