summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--llm-chat-api.el142
-rw-r--r--llm-chat-claude.el77
-rw-r--r--llm-chat-deepseek.el77
-rw-r--r--llm-chat-gemini.el157
-rw-r--r--llm-chat-openai.el58
-rw-r--r--llm-chat-replacements.el55
-rw-r--r--llm-chat.el93
7 files changed, 652 insertions, 7 deletions
diff --git a/llm-chat-api.el b/llm-chat-api.el
new file mode 100644
index 0000000..96082f8
--- /dev/null
+++ b/llm-chat-api.el
@@ -0,0 +1,142 @@
+;;; llm-chat-api.el --- Common interfaces to use -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; Code:
+
+(require 'cl-lib)  ; cl-defstruct / cl-defgeneric / cl-defmethod
+(require 'subr-x)  ; when-let on older Emacsen
+
+;; Default system prompt; kept in sync with the first history entry by
+;; `llm-chat-set-system-prompt'.
+(defvar llm-chat-system-prompt "You are a helpful bot. You make concise accurate responses. You only give the choice you think is best - not multiple examples. You are a professional at creating custom operating systems. Excelling at performance and optimization for running programs on low spec devices that still provide the best user experience")
+;; Conversation so far: a list of ((role . STR) (content . STR)) alists,
+;; always starting with the system entry.
+(defvar llm-chat-history `(((role . "system") (content . ,llm-chat-system-prompt))))
+;; Currently selected model name (a string), or nil when none chosen yet.
+(defvar llm-chat-model nil)
+;; Sampling temperature sent with requests (0.0 - 1.0).
+(defvar llm-chat-temperature 0.3)
+;; Non-nil to request streaming responses.
+(defvar llm-chat-stream t)
+;; Accumulates the assistant text of the response currently streaming in.
+(defvar llm-chat-buffer nil)
+;; Carry-over for partial SSE/JSON data between process-filter calls.
+(defvar llm-chat--incomplete "")
+
+(defun llm-chat-get-system-prompt ()
+  "Return the current system prompt string."
+  llm-chat-system-prompt)
+
+(defun llm-chat-set-system-prompt (prompt)
+  "Set the system prompt to PROMPT.
+Updates both the variable and the content of the first (system) entry
+of `llm-chat-history' so requests already reflect the change."
+  (setf (cdr (assoc 'content (car llm-chat-history))) prompt)
+  (setq llm-chat-system-prompt prompt))
+
+(defun llm-chat-get-history ()
+  "Return the full conversation history, system entry first."
+  llm-chat-history)
+
+(defun llm-chat-add-history (entry)
+  "Append ENTRY, a list of message alists, to the history."
+  (setq llm-chat-history (append llm-chat-history entry)))
+
+(defun llm-chat-add-user-history (prompt)
+  "Append PROMPT to the history as a \"user\" message."
+  (llm-chat-add-history `(((role . "user") (content . ,prompt)))))
+
+(defun llm-chat-add-assistant-history (prompt)
+  "Append PROMPT to the history as an \"assistant\" message."
+  (llm-chat-add-history `(((role . "assistant") (content . ,prompt)))))
+
+(defun llm-chat-reset-history ()
+  "Discard the conversation, keeping only the current system prompt."
+  (interactive)
+  (setq llm-chat-history `(((role . "system") (content . ,(llm-chat-get-system-prompt)))))
+  (message "history has been reset to current system prompt"))
+
+(defun llm-chat-save-history (path)
+  "Serialize `llm-chat-history' to the file PATH as a readable Lisp form.
+Previously an empty stub; now writes the history with `prin1' so it
+round-trips through `llm-chat-load-history'."
+  (interactive "FSave chat history to file: ")
+  (with-temp-file path
+    (prin1 llm-chat-history (current-buffer))))
+
+(defun llm-chat-load-history (path)
+  "Replace `llm-chat-history' with the form previously saved at PATH.
+Previously an empty stub; now reads back what `llm-chat-save-history'
+wrote."
+  (interactive "fLoad chat history from file: ")
+  (with-temp-buffer
+    (insert-file-contents path)
+    (setq llm-chat-history (read (current-buffer)))))
+
+(defun llm-chat-get-model ()
+  "Return the currently selected model name, or nil."
+  llm-chat-model)
+
+(defun llm-chat-set-model (model)
+  "Select MODEL (a string) as the active model and confirm in the echo area."
+  (setq llm-chat-model model)
+  ;; Use an explicit format string: passing user data as the format
+  ;; argument of `message' misbehaves on names containing `%'.
+  (message "model set to %s" llm-chat-model))
+
+(defun llm-chat-get-temperature ()
+  "Return the sampling temperature used for requests."
+  llm-chat-temperature)
+
+(defun llm-chat-set-temperature (temp)
+  "Set the sampling temperature to TEMP, a number between 0.0 and 1.0.
+Values outside that range are rejected with a message."
+  (interactive "nenter a value between 0.0 and 1.0: ")
+  (if (and (>= temp 0.0) (<= temp 1.0))
+      (setq llm-chat-temperature temp)
+    (message "temperature must be between 0.0 and 1.0")))
+
+(defun llm-chat-get-stream? ()
+  "Return non-nil if streaming responses are requested."
+  llm-chat-stream)
+
+(defun llm-chat-set-stream (stream?)
+  "Set whether responses should be streamed to STREAM?."
+  (setq llm-chat-stream stream?))
+
+(defun llm-chat-get-buffer ()
+  "Return the assistant text accumulated for the in-flight response."
+  llm-chat-buffer)
+
+(defun llm-chat-set-buffer (buffer)
+  "Set the accumulated assistant text to BUFFER (a string, or nil to reset)."
+  (setq llm-chat-buffer buffer))
+
+(defun llm-chat-insert-user-prompt ()
+  "Insert a \"(USR):\" marker at point for composing a user prompt."
+  (interactive)
+  (insert "(USR):"))
+
+(defun llm-chat-insert-system-prompt ()
+  "Insert a \"(SYS):\" marker at point for composing a system prompt."
+  (interactive)
+  (insert "(SYS):"))
+
+;; A backend bundles everything needed to issue one request:
+;;   api-endpoint -- URL the request body is POSTed to
+;;   api-key      -- secret, looked up via auth-source
+;;   headers-fn   -- (fn BACKEND) -> list of extra curl arguments
+;;   json-fn      -- (fn) -> request body as a JSON string
+;;   filter-fn    -- process filter rendering the streamed reply
+(cl-defstruct llm-chat-backend
+  api-endpoint
+  api-key
+  headers-fn
+  json-fn
+  filter-fn)
+
+;; Dispatch predicate: each provider file adds an implementation that
+;; returns non-nil when MODEL belongs to that provider.
+;; NOTE(review): provider files must add these with `cl-defmethod';
+;; re-declaring the generic with specializers clobbers other providers.
+(cl-defgeneric llm-chat-resolve-model-backend (backend model))
+
+(cl-defgeneric llm-chat-request (backend prompt)
+  "This is to make a llm api request")
+
+(cl-defmethod llm-chat-request ((backend llm-chat-backend) prompt)
+  "Send PROMPT to BACKEND's endpoint via an asynchronous curl process.
+Records PROMPT in the history, puts the *LLM Chat* buffer in
+markdown-mode, writes the request body to a temp file and streams the
+reply through the backend's filter function.  The temp file is now
+deleted when curl exits; previously one file leaked per request."
+  (llm-chat-set-buffer nil)
+  (llm-chat-add-user-history prompt)
+  (with-current-buffer (get-buffer-create "*LLM Chat*")
+    (unless (derived-mode-p 'markdown-mode)
+      (markdown-mode)))
+  (let ((temp-file (make-temp-file "llm-chat-request-" nil ".json"))
+        (headers-fn (llm-chat-backend-headers-fn backend))
+        (json-fn (llm-chat-backend-json-fn backend))
+        (filter-fn (llm-chat-backend-filter-fn backend)))
+    (with-temp-file temp-file
+      (insert (funcall json-fn)))
+    (make-process
+     :name "llm-chat-stream"
+     :command (append (list "curl" "--no-buffer" "-X" "POST"
+                            "-H" "Content-Type: application/json"
+                            "-d" (concat "@" temp-file)
+                            (llm-chat-backend-api-endpoint backend))
+                      (funcall headers-fn backend))
+     :filter filter-fn
+     ;; Clean up the request body file once the process changes state.
+     :sentinel (lambda (_proc _event)
+                 (ignore-errors (delete-file temp-file))))))
+
+(defun llm-chat-data-filter (proc string backend-finished-p-fn backend-parse-content-fn)
+  "Generic process filter for SSE (\"data: ...\") streaming responses.
+PROC is the curl process and STRING its latest chunk of output.
+Complete \"data: ...\" events are split out (a trailing partial event
+is carried over in `llm-chat--incomplete'); each event is either
+treated as end-of-response (BACKEND-FINISHED-P-FN returns non-nil) or
+rendered into the *LLM Chat* buffer via BACKEND-PARSE-CONTENT-FN.
+Fixed: removed a leftover debug `write-region' that dumped the whole
+stream to /tmp/stream.txt on every event."
+  (with-current-buffer (get-buffer-create "*LLM Chat*")
+    (save-excursion
+      (goto-char (point-max))
+      (let ((data (concat llm-chat--incomplete string))
+            (start 0)
+            matches)
+        ;; Collect every complete "data: ...\n\n" event; keep the tail
+        ;; (an incomplete event, if any) for the next filter call.
+        (while (string-match "data: \\(.*\\)\n\n" data start)
+          (push (match-string 1 data) matches)
+          (setq start (match-end 0)))
+        (setq llm-chat--incomplete (substring data start))
+        (dolist (match (nreverse matches))
+          (if (funcall backend-finished-p-fn match)
+              (progn
+                ;; Response finished: close the AI turn, commit the
+                ;; streamed text to history, prompt the user again.
+                (when (not (bolp)) (insert "\n"))
+                (insert "(USR):")
+                (llm-chat-add-assistant-history (llm-chat-get-buffer))
+                (llm-chat-set-buffer nil)
+                (setq llm-chat--incomplete ""))
+            (when-let ((content (funcall backend-parse-content-fn match)))
+              ;; First chunk of a response starts a new "(AI): " line.
+              (unless (llm-chat-get-buffer)
+                (when (not (bolp)) (insert "\n"))
+                (insert "(AI): "))
+              (insert content)
+              (llm-chat-set-buffer (concat (llm-chat-get-buffer) content)))))))))
+
+(provide 'llm-chat-api)
+
+;;; llm-chat-api.el ends here
diff --git a/llm-chat-claude.el b/llm-chat-claude.el
new file mode 100644
index 0000000..4807fa9
--- /dev/null
+++ b/llm-chat-claude.el
@@ -0,0 +1,77 @@
+;;; llm-chat-claude.el --- Implements claude llm integration -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; License:
+
+;; llm-chat-claude.el is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+
+;; llm-chat-claude.el is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+;;; Code:
+
+(require 'llm-chat-api)
+
+;; Model names served by the Anthropic backend.
+(defvar llm-chat-claude-models '("claude-3-7-sonnet-latest" "claude-3-5-haiku-latest" "claude-3-5-sonnet-latest" "claude-3-sonnet-20240229" "claude-3-haiku-20240307"))
+
+(cl-defmethod llm-chat-resolve-model-backend ((backend (eql 'claude)) model)
+  "Return non-nil when MODEL belongs to the Claude backend.
+Changed from `cl-defgeneric': a generic's arglist cannot carry
+specializers, and each backend file re-declaring the generic clobbered
+the other providers' implementations.  `cl-defmethod' adds one
+implementation per provider instead."
+  (and (member model llm-chat-claude-models) t))
+
+(defun llm-chat-claude-get-model ()
+  "Return the selected model if it is a Claude model, else a cheap default."
+  (if (member (llm-chat-get-model) llm-chat-claude-models)
+      (llm-chat-get-model)
+    "claude-3-haiku-20240307"))
+
+(defun llm-chat-make-claude-backend ()
+  "Construct an `llm-chat-backend' for the Anthropic Messages API.
+The key is read from auth-source for host \"api.anthropic.com\"."
+  (make-llm-chat-backend
+   :api-endpoint "https://api.anthropic.com/v1/messages"
+   :api-key (auth-source-pick-first-password :host "api.anthropic.com")
+   :headers-fn #'llm-chat-claude-headers
+   :json-fn #'llm-chat-claude-json
+   :filter-fn #'llm-chat-claude-filter))
+
+(defun llm-chat-claude-json ()
+  "Encode the request body for the Messages API.
+The system prompt goes in the top-level `system' field, so `messages'
+is the history minus its first (system) entry.  The API requires
+max_tokens; 4096 is a fixed ceiling here."
+  (let ((json-object `((model . ,(llm-chat-claude-get-model))
+                       (system . ,(llm-chat-get-system-prompt))
+                       (messages . ,(cdr (llm-chat-get-history)))
+                       (temperature . ,(llm-chat-get-temperature))
+                       (max_tokens . 4096)
+                       (stream . ,(llm-chat-get-stream?)))))
+    (json-encode json-object)))
+
+(defun llm-chat-claude-headers (backend)
+  "Return curl arguments carrying the API version and key headers."
+  (list "-H" "anthropic-version: 2023-06-01"
+        "-H" (concat "x-api-key: " (llm-chat-backend-api-key backend))))
+
+(defun llm-chat-claude-filter (proc string)
+  "Process filter wiring the generic SSE filter to Claude's event format."
+  (llm-chat-data-filter proc string
+                        #'llm-chat-claude-finished-p
+                        #'llm-chat-claude-parse-content))
+
+(defun llm-chat-claude-finished-p (string)
+  "Non-nil when STRING is the terminal \"message_stop\" stream event."
+  (let* ((json-object (json-read-from-string string))
+         (type (alist-get 'type json-object)))
+    (string-equal type "message_stop")))
+
+(defun llm-chat-claude-parse-content (string)
+  "Return the text delta of a \"content_block_delta\" event, else nil.
+Other event types lack a `delta'/`text' field, so `when-let*' yields nil."
+  (when-let* ((json-object (json-read-from-string string))
+              (type (alist-get 'type json-object))
+              (delta (alist-get 'delta json-object))
+              (text (alist-get 'text delta)))
+    (when (string-equal type "content_block_delta")
+      text)))
+
+(provide 'llm-chat-claude)
+
+;;; llm-chat-claude.el ends here
diff --git a/llm-chat-deepseek.el b/llm-chat-deepseek.el
new file mode 100644
index 0000000..60ff763
--- /dev/null
+++ b/llm-chat-deepseek.el
@@ -0,0 +1,77 @@
+;;; llm-chat-deepseek.el --- Implements deepseek llm integration -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; License:
+
+;; llm-chat-deepseek.el is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+
+;; llm-chat-deepseek.el is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+;;; Code:
+
+(require 'llm-chat-api)
+
+;; Model names served by the DeepSeek backend.
+(defvar llm-chat-deepseek-models '("deepseek-chat" "deepseek-coder" "deepseek-reasoner"))
+
+(cl-defmethod llm-chat-resolve-model-backend ((backend (eql 'deepseek)) model)
+  "Return non-nil when MODEL belongs to the DeepSeek backend.
+Changed from `cl-defgeneric': a generic's arglist cannot carry
+specializers, and each backend file re-declaring the generic clobbered
+the other providers' implementations."
+  (and (member model llm-chat-deepseek-models) t))
+
+(defun llm-chat-deepseek-get-model ()
+  "Return the selected model if it is a DeepSeek model, else a default."
+  (if (member (llm-chat-get-model) llm-chat-deepseek-models)
+      (llm-chat-get-model)
+    "deepseek-coder"))
+
+(defun llm-chat-make-deepseek-backend ()
+  "Construct an `llm-chat-backend' for the DeepSeek chat-completions API."
+  (make-llm-chat-backend
+   :api-endpoint "https://api.deepseek.com/chat/completions"
+   :api-key (auth-source-pick-first-password :host "api.deepseek.com")
+   :headers-fn #'llm-chat-deepseek-headers
+   :json-fn #'llm-chat-deepseek-json
+   :filter-fn #'llm-chat-deepseek-filter))
+
+(defun llm-chat-deepseek-json ()
+  "Encode the request body; the full history (system entry included)
+is sent as the OpenAI-compatible `messages' array."
+  (let ((json-object `((model . ,(llm-chat-deepseek-get-model))
+                       (messages . ,(llm-chat-get-history))
+                       (temperature . ,(llm-chat-get-temperature))
+                       (stream . ,(llm-chat-get-stream?)))))
+    (json-encode json-object)))
+
+(defun llm-chat-deepseek-headers (backend)
+  "Return the curl Authorization header argument (Bearer token)."
+  (list "-H" (concat "Authorization: " (concat "Bearer "
+                                               (llm-chat-backend-api-key backend)))))
+
+(defun llm-chat-deepseek-filter (proc string)
+  "Process filter wiring the generic SSE filter to DeepSeek's format."
+  (llm-chat-data-filter proc string
+                        #'llm-chat-deepseek-finished-p
+                        #'llm-chat-deepseek-parse-content))
+
+(defun llm-chat-deepseek-finished-p (match)
+  "Non-nil when MATCH is the OpenAI-style \"[DONE]\" stream sentinel."
+  (string-equal "[DONE]" match))
+
+(defun llm-chat-deepseek-parse-content (string)
+  "Return the delta text from a streaming chunk, or nil.
+Prefers `content'; falls back to `reasoning_content' (emitted while
+deepseek-reasoner is thinking)."
+  (let* ((json-object (json-read-from-string string))
+         (choices (alist-get 'choices json-object))
+         (first-choice (aref choices 0))
+         (delta (alist-get 'delta first-choice))
+         (content (alist-get 'content delta nil))
+         ;; NOTE(review): the `assoc' guard duplicates what `alist-get'
+         ;; already does; kept as-is to preserve behavior exactly.
+         (reasoning-content (when (assoc 'reasoning_content delta)
+                              (alist-get 'reasoning_content delta nil))))
+    (cond
+     ((and (stringp content) (not (equal content "null"))) content)
+     ((stringp reasoning-content) reasoning-content))))
+
+(provide 'llm-chat-deepseek)
+
+;;; llm-chat-deepseek.el ends here
diff --git a/llm-chat-gemini.el b/llm-chat-gemini.el
new file mode 100644
index 0000000..5d4ae1b
--- /dev/null
+++ b/llm-chat-gemini.el
@@ -0,0 +1,157 @@
+;;; llm-chat-gemini.el --- Implements gemini llm integration -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; License:
+
+;; llm-chat-gemini.el is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+
+;; llm-chat-gemini.el is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+;;; Code:
+
+(require 'llm-chat-api)
+
+;; Model names served by the Gemini backend.
+(defvar llm-chat-gemini-models '("gemini-2.5-flash-preview-04-17" "gemini-2.5-pro-preview-03-25" "gemini-2.0-flash-lite" "gemini-1.5-flash" "gemini-1.5-flash-8b" "gemini-1.5-pro"))
+
+(cl-defmethod llm-chat-resolve-model-backend ((backend (eql 'gemini)) model)
+  "Return non-nil when MODEL belongs to the Gemini backend.
+Changed from `cl-defgeneric': a generic's arglist cannot carry
+specializers, and each backend file re-declaring the generic clobbered
+the other providers' implementations."
+  (and (member model llm-chat-gemini-models) t))
+
+(defun llm-chat-gemini-get-model ()
+  "Return the selected model if it is a Gemini model, else a default."
+  (if (member (llm-chat-get-model) llm-chat-gemini-models)
+      (llm-chat-get-model)
+    "gemini-2.0-flash-lite"))
+
+(defun llm-chat-gemini-convert-history-contents ()
+  "Translate `llm-chat-history' into Gemini's `contents' list.
+Returns one alist per message, each of the form
+  ((role . \"user\"|\"model\") (parts . (((text . TEXT))))).
+The system entry is folded in as a \"user\" message since this request
+shape has no system role.
+
+Fixed: the original appended each message's key/value pairs *flat*
+onto one list, producing a single alist with duplicate keys that
+json-encode serialized into an invalid request body."
+  (let ((contents '()))
+    (dolist (entry (llm-chat-get-history))
+      (let ((role (alist-get 'role entry))
+            (text (alist-get 'content entry)))
+        (cond ((or (string= role "user") (string= role "system"))
+               (push `((role . "user") (parts . (((text . ,text))))) contents))
+              ((string= role "assistant")
+               (push `((role . "model") (parts . (((text . ,text))))) contents)))))
+    (nreverse contents)))
+
+(defun llm-chat-gemini-json ()
+  "Build the streamGenerateContent request body as a JSON string.
+Converts the chat history inline so `contents' is a proper array of
+role/parts message objects (assistant maps to \"model\"; user and
+system both map to \"user\").  Fixed: the original wrapped a flat,
+duplicate-key alist in a single-element list, which encoded to an
+invalid request."
+  (let* ((contents
+          (mapcar (lambda (msg)
+                    (let ((role (alist-get 'role msg))
+                          (text (alist-get 'content msg)))
+                      `((role . ,(if (string= role "assistant") "model" "user"))
+                        (parts . (((text . ,text)))))))
+                  (llm-chat-get-history)))
+         (json-object `((contents . ,contents)
+                        (generation_config . ((temperature . ,(llm-chat-get-temperature)))))))
+    (json-encode json-object)))
+
+(defun llm-chat-gemini-headers (backend)
+  "Gemini needs no extra curl headers; the API key travels in the URL."
+  ())
+
+(defun llm-chat-make-gemini-backend ()
+  "Construct an `llm-chat-backend' for the Gemini generateContent API.
+The model name and the API key are baked into the endpoint URL at
+construction time.
+NOTE(review): the key is looked up under auth-source host
+\"api.gemini.com\" -- a local naming convention, not the real API host
+(generativelanguage.googleapis.com); confirm the authinfo entry."
+  (make-llm-chat-backend
+   :api-endpoint (concat "https://generativelanguage.googleapis.com/v1beta/models/"
+                         (llm-chat-gemini-get-model)
+                         ":streamGenerateContent?key="
+                         (auth-source-pick-first-password :host "api.gemini.com"))
+   :api-key (auth-source-pick-first-password :host "api.gemini.com")
+   :headers-fn #'llm-chat-gemini-headers
+   :json-fn #'llm-chat-gemini-json
+   :filter-fn #'llm-chat-gemini-filter))
+
+(defun llm-chat-gemini-filter (proc string)
+  "Buffer incomplete JSON until a complete object is received, then parse.
+Gemini's streamGenerateContent replies are JSON objects rather than
+SSE \"data:\" events, so chunks accumulate in `llm-chat--incomplete'
+until the text ends with \"}\" (heuristic: first \"{\" to trailing
+\"}\").  Fixed: removed a leftover debug `write-region' that dumped
+every chunk to /tmp/stream.txt."
+  (setq llm-chat--incomplete (concat llm-chat--incomplete string))
+  (when (and (string-match "{" llm-chat--incomplete)
+             (string-match "}[[:space:]]*\\'" llm-chat--incomplete))
+    (let ((json-str (substring llm-chat--incomplete
+                               (string-match "{" llm-chat--incomplete))))
+      (condition-case err
+          (let* ((json-object (json-read-from-string json-str))
+                 (candidates (alist-get 'candidates json-object))
+                 (first-candidate (if (vectorp candidates) (aref candidates 0) nil))
+                 (content (when first-candidate
+                            (alist-get 'content first-candidate)))
+                 (parts (when content (alist-get 'parts content)))
+                 (text-part (when (and parts (vectorp parts) (> (length parts) 0))
+                              (alist-get 'text (aref parts 0))))
+                 (finish-reason (when first-candidate
+                                  (alist-get 'finishReason first-candidate))))
+            (with-current-buffer (get-buffer-create "*LLM Chat*")
+              (save-excursion
+                (goto-char (point-max))
+                (when text-part
+                  ;; First chunk of a response starts a new "(AI): " line.
+                  (unless (llm-chat-get-buffer)
+                    (when (not (bolp)) (insert "\n"))
+                    (insert "(AI): "))
+                  (insert text-part)
+                  (llm-chat-set-buffer (concat (or (llm-chat-get-buffer) "") text-part)))
+                (when (and finish-reason (string= finish-reason "STOP"))
+                  ;; Close the AI turn and commit it to the history.
+                  (when (not (bolp)) (insert "\n"))
+                  (insert "(USR):")
+                  (llm-chat-add-assistant-history (llm-chat-get-buffer))
+                  (llm-chat-set-buffer nil)))))
+        (error
+         (message "Gemini filter: JSON parsing error: %s" (error-message-string err)))))
+    (setq llm-chat--incomplete "")))
+
+;; (defun llm-chat-gemini-filter (proc string)
+;; "Filter function to handle Gemini API JSON responses."
+;; (with-current-buffer (get-buffer-create "*LLM Chat*")
+;; (save-excursion
+;; (goto-char (point-max))
+;; (write-region (concat "start-----" string "-----end\n") nil "/tmp/stream.txt" t 'append)
+;; ;; Try to extract JSON from the received string
+;; (when-let* ((start-pos (string-match "{" string))
+;; (json-string (substring string start-pos)))
+;; (condition-case err
+;; (let* ((json-object (json-read-from-string json-string))
+;; (candidates (alist-get 'candidates json-object))
+;; (first-candidate (if (vectorp candidates) (aref candidates 0) nil))
+;; (content (when first-candidate
+;; (alist-get 'content first-candidate)))
+;; (parts (when content (alist-get 'parts content)))
+;; (text-part (when (and parts (vectorp parts))
+;; (alist-get 'text (aref parts 0))))
+;; (finish-reason (when first-candidate
+;; (alist-get 'finishReason first-candidate))))
+
+;; (when text-part
+;; (if (not (llm-chat-get-buffer))
+;; (progn
+;; (if (not (bolp))
+;; (insert "\n"))
+;; (insert "(AI): ")))
+;; (insert text-part)
+;; (llm-chat-set-buffer (concat (or (llm-chat-get-buffer) "") text-part)))
+
+;; (when (and finish-reason (string= finish-reason "STOP"))
+;; (if (not (bolp))
+;; (insert "\n"))
+;; (insert "(USR):")
+;; (llm-chat-add-assistant-history (llm-chat-get-buffer))
+;; (llm-chat-set-buffer nil))))))))
+
+(defun llm-chat-gemini-print-content (string)
+  "Return the text of the first part of the first candidate in STRING.
+STRING is a complete generateContent JSON response; returns nil when
+any of the expected fields is absent."
+  (let* ((json-object (json-read-from-string string))
+         (candidates (alist-get 'candidates json-object))
+         (first-candidate (aref candidates 0)))
+    (when-let* ((content (alist-get 'content first-candidate))
+                (parts (alist-get 'parts content))
+                (first-part (aref parts 0))
+                (text (alist-get 'text first-part)))
+      text)))
+
+(provide 'llm-chat-gemini)
+
+;;; llm-chat-gemini.el ends here
diff --git a/llm-chat-openai.el b/llm-chat-openai.el
new file mode 100644
index 0000000..a093dde
--- /dev/null
+++ b/llm-chat-openai.el
@@ -0,0 +1,58 @@
+;;; llm-chat-openai.el --- Implements openai llm integration -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; Code:
+
+(require 'llm-chat-api)
+
+;; Model names served by the OpenAI backend.
+(defvar llm-chat-openai-models '("gpt-4.1" "gpt-3.5-turbo" "gpt-5"))
+
+(cl-defmethod llm-chat-resolve-model-backend ((backend (eql 'openai)) model)
+  "Return non-nil when MODEL belongs to the OpenAI backend.
+Changed from `cl-defgeneric': a generic's arglist cannot carry
+specializers, and each backend file re-declaring the generic clobbered
+the other providers' implementations."
+  (and (member model llm-chat-openai-models) t))
+
+(defun llm-chat-openai-get-model ()
+  "Return the selected model if it is an OpenAI model, else a default."
+  (if (member (llm-chat-get-model) llm-chat-openai-models)
+      (llm-chat-get-model)
+    "gpt-3.5-turbo"))
+
+(defun llm-chat-make-openai-backend ()
+  "Construct an `llm-chat-backend' for the OpenAI Responses API."
+  (make-llm-chat-backend
+   :api-endpoint "https://api.openai.com/v1/responses"
+   :api-key (auth-source-pick-first-password :host "api.openai.com")
+   :headers-fn #'llm-chat-openai-headers
+   :json-fn #'llm-chat-openai-json
+   :filter-fn #'llm-chat-openai-filter))
+
+(defun llm-chat-openai-json ()
+  "Encode the request body; the Responses API takes the history as `input'."
+  (let ((json-object `((model . ,(llm-chat-openai-get-model))
+                       (input . ,(llm-chat-get-history))
+                       (temperature . ,(llm-chat-get-temperature))
+                       (stream . ,(llm-chat-get-stream?)))))
+    (json-encode json-object)))
+
+(defun llm-chat-openai-headers (backend)
+  "Return the curl Authorization header argument (Bearer token)."
+  (list "-H" (concat "Authorization: " (concat "Bearer "
+                                               (llm-chat-backend-api-key backend)))))
+(defun llm-chat-openai-filter (proc string)
+  "Process filter wiring the generic SSE filter to Responses API events."
+  (llm-chat-data-filter proc string
+                        #'llm-chat-openai-finished-p
+                        #'llm-chat-openai-parse-content))
+
+(defun llm-chat-openai-finished-p (string)
+  "Non-nil on a \"response.output_item.done\" stream event.
+NOTE(review): this event fires once per output item; if a response can
+carry several items, \"response.completed\" may be the safer terminal
+event -- confirm against the Responses API streaming docs."
+  (when-let* ((json-object (json-read-from-string string))
+              (type (alist-get 'type json-object)))
+    (string= type "response.output_item.done")))
+
+(defun llm-chat-openai-parse-content (string)
+  "Return the text delta of a \"response.output_text.delta\" event, else nil."
+  (when-let* ((json-object (json-read-from-string string))
+              (type (alist-get 'type json-object))
+              (text (alist-get 'delta json-object)))
+    (when (string-equal type "response.output_text.delta")
+      text)))
+
+(provide 'llm-chat-openai)
+
+;;; llm-chat-openai.el ends here
diff --git a/llm-chat-replacements.el b/llm-chat-replacements.el
new file mode 100644
index 0000000..d37ff40
--- /dev/null
+++ b/llm-chat-replacements.el
@@ -0,0 +1,55 @@
+;;; llm-chat-replacements.el --- Chat replacements functionality -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; Code:
+
+(defcustom llm-chat-user-replacements nil
+  "User defined replacement key value pairs.
+Must be a list of cons cells like (\"key\" . \"value\")."
+  :type '(alist :key-type string :value-type string)
+  :group 'llm-chat)
+
+;; Working table of active replacements, keyed by literal search string.
+(defvar llm-chat-replacements-dict (make-hash-table :test 'equal))
+
+(defun llm-chat-get-replacements-dict ()
+  "Return the hash table of active replacements."
+  llm-chat-replacements-dict)
+
+(defun llm-chat-reset-replacements-dict ()
+  "Remove every entry from `llm-chat-replacements-dict'."
+  (clrhash llm-chat-replacements-dict))
+
+(defun llm-chat-remove-replacements-dict (key)
+  "Remove KEY from `llm-chat-replacements-dict'."
+  (remhash key llm-chat-replacements-dict))
+
+;; Backward-compatible alias for the original misspelled name
+;; (\"replacementss\"); existing callers keep working.
+(defalias 'llm-chat-remove-replacementss-dict #'llm-chat-remove-replacements-dict)
+
+(defun llm-chat-add-replacements-dict (key-value-pair)
+  "Add KEY-VALUE-PAIR, a (\"key\" . \"value\") cons of strings, to the dict.
+Anything else is rejected with a message."
+  (if (and (consp key-value-pair)
+           (stringp (car key-value-pair))
+           (stringp (cdr key-value-pair)))
+      (puthash (car key-value-pair) (cdr key-value-pair) llm-chat-replacements-dict)
+    (message "key-value-pair is not a single key value cons")))
+
+(defun llm-chat-add-list-replacements-dict (list-of-key-value-pairs)
+  "Add every cons in LIST-OF-KEY-VALUE-PAIRS to the replacements dict."
+  (dolist (key-value-pair list-of-key-value-pairs)
+    (llm-chat-add-replacements-dict key-value-pair)))
+
+(defun llm-chat-init-replacements-dict ()
+  "Reset the dict and seed it from `llm-chat-user-replacements'.
+Fixed a typo: the original referenced the void variable
+`llm-chat-user-replacementss', so user replacements were never loaded
+(and the call signalled a void-variable error)."
+  (llm-chat-reset-replacements-dict)
+  (when llm-chat-user-replacements
+    (llm-chat-add-list-replacements-dict llm-chat-user-replacements)))
+
+(defun llm-chat-bulk-replacements-with-dict (text dict)
+  "Return TEXT with every key of DICT literally replaced by its value.
+DICT is a hash table; any other value leaves TEXT unchanged.
+Replacement order follows hash-table iteration order, so overlapping
+keys are not guaranteed any particular precedence."
+  (with-temp-buffer
+    (insert text)
+    (cond
+     ((hash-table-p dict)
+      (maphash
+       (lambda (key value)
+         (goto-char (point-min))
+         ;; `search-forward' is a literal (non-regexp) search; the
+         ;; t t flags keep VALUE's case and treat it literally.
+         (while (search-forward key nil t)
+           (replace-match value t t)))
+       dict)))
+    (buffer-string)))
+
+(provide 'llm-chat-replacements)
+
+;;; llm-chat-replacements.el ends here
diff --git a/llm-chat.el b/llm-chat.el
index 57b5288..ea3e92e 100644
--- a/llm-chat.el
+++ b/llm-chat.el
@@ -25,30 +25,109 @@
;; along with this program. If not, see <https://www.gnu.org/licenses/>.
;;; Code:
-
(require 'transient)
+(require 'llm-chat-api)
+(require 'llm-chat-replacements)
+(require 'llm-chat-gemini)
+(require 'llm-chat-claude)
+(require 'llm-chat-openai)
+(require 'llm-chat-deepseek)
(transient-define-prefix llm-chat-prompt-menu ()
["llm-chat prompt menu"
- ("u" "user prompt" ignore)
- ("s" "system prompt" ignore)])
+ ("u" "user prompt" llm-chat-insert-user-prompt)
+ ("y" "youtube summary" llm-chat-youtube-transcript-prompt)
+ ("s" "system prompt" llm-chat-insert-system-prompt)])
(transient-define-prefix llm-chat-settings-menu ()
["llm-chat settings menu"
- ("t" "temperature" ignore)
+ ("t" "temperature" llm-chat-set-temperature)
("s" "stream" ignore)])
+;; Quick model selection; keys are two-char mnemonics per provider.
+;; Fixed: the menu title previously read "settings menu" (copy-paste).
+(transient-define-prefix llm-chat-model-menu ()
+  ["llm-chat model menu"
+   ("dc" "deepseek-chat" (lambda () (interactive) (llm-chat-set-model "deepseek-chat")))
+   ("dk" "deepseek-coder" (lambda () (interactive) (llm-chat-set-model "deepseek-coder")))
+   ("dr" "deepseek-reasoner" (lambda () (interactive) (llm-chat-set-model "deepseek-reasoner")))
+   ("g4" "gpt-4.1" (lambda () (interactive) (llm-chat-set-model "gpt-4.1")))
+   ("g5" "gpt-5" (lambda () (interactive) (llm-chat-set-model "gpt-5")))
+   ("ch" "claude-3-5-haiku-latest" (lambda () (interactive) (llm-chat-set-model "claude-3-5-haiku-latest")))])
+
(transient-define-prefix llm-chat-menu ()
["llm-chat menu"
- ("p" "prompt evaluate" ignore)
+ ("p" "prompt evaluate" llm-chat-capture-prompt)
("i" "insert prompt" llm-chat-prompt-menu)
- ("m" "model select" ignore)
+ ("m" "model select" llm-chat-model-menu)
("hs" "history save" ignore)
("hl" "history load" ignore)
- ("hc" "history clear" ignore)
+ ("hc" "history clear" llm-chat-reset-history)
("s" "settings" llm-chat-settings-menu)
("q" "close menu" ignore)])
+(defun llm-chat-resolve-backend ()
+  "Return a freshly constructed backend for the currently selected model.
+Each provider is probed in turn via `llm-chat-resolve-model-backend';
+returns nil (after messaging the user) when no provider claims the
+model."
+  (cond ((llm-chat-resolve-model-backend 'deepseek (llm-chat-get-model))
+         (llm-chat-make-deepseek-backend))
+        ((llm-chat-resolve-model-backend 'gemini (llm-chat-get-model))
+         (llm-chat-make-gemini-backend))
+        ((llm-chat-resolve-model-backend 'claude (llm-chat-get-model))
+         (llm-chat-make-claude-backend))
+        ((llm-chat-resolve-model-backend 'openai (llm-chat-get-model))
+         (llm-chat-make-openai-backend))
+        (t
+         (progn
+           (message "Supported model not selected cannot resolve backend")
+           nil))))
+
+(defun llm-chat-capture-prompt ()
+  "Send or apply the prompt that precedes point.
+Searches backward for the nearest \"(AI):\", \"(USR):\" or \"(SYS):\"
+marker.  A (USR) prompt is sent to the resolved backend after applying
+the replacements dict; a (SYS) prompt updates the system prompt.  An
+(AI) marker -- or no marker at all -- aborts with a message."
+  (interactive)
+  (if-let* ((end-pos (point))
+            (search (re-search-backward "(\\(AI\\|USR\\|SYS\\)):" nil t))
+            ;; nil unless the nearest marker is USR or SYS.
+            (match (car (member (match-string 1) '("USR" "SYS"))))
+            ;; Markers are exactly 6 characters: "(USR):" / "(SYS):".
+            (start-pos (+ (point) 6))
+            (prompt (buffer-substring-no-properties start-pos end-pos)))
+      (cond ((string= match "USR")
+             (when-let (backend (llm-chat-resolve-backend))
+               ;; Mirror the prompt into *LLM Chat* when invoked from
+               ;; some other buffer.
+               (if (not (string= (buffer-name) "*LLM Chat*"))
+                   (with-current-buffer (get-buffer-create "*LLM Chat*")
+                     (goto-char (point-max))
+                     (insert prompt)))
+               (llm-chat-request backend
+                                 (llm-chat-bulk-replacements-with-dict prompt (llm-chat-get-replacements-dict)))
+               (goto-char end-pos)))
+            ((string= match "SYS")
+             (progn
+               (llm-chat-set-system-prompt prompt)
+               (goto-char end-pos))))
+    (message "could not process prompt, no (USR) or (SYS) prompt")))
+
+(defun llm-chat-youtube-transcript-prompt ()
+  "Insert a summarization prompt plus a transcript file into *LLM Chat*.
+Prompts for a subtitle/transcript file, appends a canned summarization
+instruction as a (USR) prompt, then inserts the file contents after it.
+The prompt still has to be sent with `llm-chat-capture-prompt'."
+  (interactive)
+  (let ((file (read-file-name "Select YouTube transcript file: ")))
+    (when (file-exists-p file)
+      (with-current-buffer (get-buffer-create "*LLM Chat*")
+        (goto-char (point-max))
+        (insert "(USR): You are an expert summarizer tasked with creating a concise, structured summary of a YouTube video based on its subtitles. Follow these rules:
+
+1. **Identify the Core Topic**:
+   - What is the main subject of the video? (1 sentence)
+
+2. **Key Points**:
+   - Extract 3–5 central arguments, steps, or insights. Use bullet points.
+   - Ignore filler words, ads, or off-topic tangents.
+
+3. **Conclusion/Takeaway**:
+   - What should the viewer remember? (1–2 sentences)
+
+4. **Style**:
+   - Neutral tone, avoid opinionated language.
+   - Use plain English (no jargon unless necessary).
+   - Keep the summary under 200 words.
+
+Here are the subtitles:\n")
+        (insert-file-contents file)
+        ;; `insert-file-contents' leaves point before the inserted text,
+        ;; so move to the end of the buffer afterwards.
+        (goto-char (point-max))
+        ))))
+
(provide 'llm-chat)
;;; llm-chat.el ends here