summaryrefslogtreecommitdiff
path: root/llm-chat.el
blob: ea3e92eef36020104448ab028a82276f3b9e4afe (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
;;; llm-chat.el --- Use llm's in emacs -*- lexical-binding: t; -*-

;; This file is not part of GNU Emacs.

;; Copyright (C) 2025 Devyn Challman

;; Author: Devyn Challman devyn@challman.org
;; Maintainer: Devyn Challman devyn@challman.org
;; Version: 0.0.1
;; Keywords: llm, ai, chat

;;; License:

;; llm-chat.el is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; llm-chat.el is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Code:
(require 'transient)
(require 'llm-chat-api)
(require 'llm-chat-replacements)
(require 'llm-chat-gemini)
(require 'llm-chat-claude)
(require 'llm-chat-openai)
(require 'llm-chat-deepseek)

(transient-define-prefix llm-chat-prompt-menu ()
  "Transient menu for inserting prompt templates into the current buffer."
  ["llm-chat prompt menu"
   ("u" "user prompt" llm-chat-insert-user-prompt)
   ("y" "youtube summary" llm-chat-youtube-transcript-prompt)
   ("s" "system prompt" llm-chat-insert-system-prompt)])

(transient-define-prefix llm-chat-settings-menu ()
  "Transient menu for adjusting llm-chat request settings.
The \"stream\" entry is a placeholder bound to `ignore' (not yet implemented)."
  ["llm-chat settings menu"
   ("t" "temperature" llm-chat-set-temperature)
   ("s" "stream" ignore)])

(transient-define-prefix llm-chat-model-menu ()
  "Transient menu for selecting the active LLM model.
Each entry calls `llm-chat-set-model' with the corresponding model name."
  ;; Title previously read "settings menu" (copy-paste from
  ;; `llm-chat-settings-menu'); corrected to "model menu".
  ["llm-chat model menu"
   ("dc" "deepseek-chat" (lambda () (interactive) (llm-chat-set-model "deepseek-chat")))
   ("dk" "deepseek-coder" (lambda () (interactive) (llm-chat-set-model "deepseek-coder")))
   ("dr" "deepseek-reasoner" (lambda () (interactive) (llm-chat-set-model "deepseek-reasoner")))
   ("g4" "gpt-4.1" (lambda () (interactive) (llm-chat-set-model "gpt-4.1")))
   ("g5" "gpt-5" (lambda () (interactive) (llm-chat-set-model "gpt-5")))
   ("ch" "claude-3-5-haiku-latest" (lambda () (interactive) (llm-chat-set-model "claude-3-5-haiku-latest")))])

(transient-define-prefix llm-chat-menu ()
  "Top-level transient menu for llm-chat.
Dispatches to prompt capture/insertion, model selection, history
management, and settings.  The \"history save\" and \"history load\"
entries are placeholders bound to `ignore' (not yet implemented)."
  ["llm-chat menu"
   ("p" "prompt evaluate" llm-chat-capture-prompt)
   ("i" "insert prompt" llm-chat-prompt-menu)
   ("m" "model select" llm-chat-model-menu)
   ("hs" "history save" ignore)
   ("hl" "history load" ignore)
   ("hc" "history clear" llm-chat-reset-history)
   ("s" "settings" llm-chat-settings-menu)
   ("q" "close menu" ignore)])

(defun llm-chat-resolve-backend ()
  "Return a backend object for the currently selected model.
Probes each supported provider (deepseek, gemini, claude, openai)
against the current model via `llm-chat-resolve-model-backend' and
constructs the matching backend.  Returns nil (after emitting a
message) when no provider matches the model."
  ;; Hoist the model lookup: the original re-evaluated
  ;; `llm-chat-get-model' once per provider probe.
  (let ((model (llm-chat-get-model)))
    (cond ((llm-chat-resolve-model-backend 'deepseek model)
	   (llm-chat-make-deepseek-backend))
	  ((llm-chat-resolve-model-backend 'gemini model)
	   (llm-chat-make-gemini-backend))
	  ((llm-chat-resolve-model-backend 'claude model)
	   (llm-chat-make-claude-backend))
	  ((llm-chat-resolve-model-backend 'openai model)
	   (llm-chat-make-openai-backend))
	  (t
	   ;; `cond' branches are implicit progns; no explicit `progn' needed.
	   (message "Supported model not selected cannot resolve backend")
	   nil))))

(defun llm-chat-capture-prompt ()
  "Capture the prompt before point and act on its tag.
Searches backward from point for the nearest \"(USR):\", \"(SYS):\",
or \"(AI):\" marker.  A (USR) prompt is sent to the resolved backend
(after applying the replacements dictionary); a (SYS) prompt updates
the system prompt.  An (AI) marker — or no marker at all — aborts
with a message, since text following an AI response is not a prompt."
  (interactive)
  ;; NOTE: order matters throughout — `re-search-backward' moves point
  ;; to the start of the match, `match-string' reads the data of that
  ;; same search, and `start-pos'/`prompt' are computed from the moved
  ;; point.  Any binding evaluating to nil short-circuits `if-let*'
  ;; into the failure message.
  (if-let* ((end-pos (point))
	   (search (re-search-backward "(\\(AI\\|USR\\|SYS\\)):" nil t))
	   ;; `member' filters out "AI" matches: (car (member "AI" '("USR" "SYS")))
	   ;; is nil, which aborts the if-let* chain.
	   (match (car (member (match-string 1) '("USR" "SYS"))))
	   ;; +6 skips past the 6-character tag "(USR):" / "(SYS):".
	   (start-pos (+ (point) 6))
	   (prompt (buffer-substring-no-properties start-pos end-pos)))
      (cond ((string= match "USR")
	     (when-let (backend (llm-chat-resolve-backend))
	       ;; Mirror prompts typed outside *LLM Chat* into that buffer
	       ;; so the conversation transcript stays complete.
	       (if (not (string= (buffer-name) "*LLM Chat*"))
		   (with-current-buffer (get-buffer-create "*LLM Chat*")
		     (goto-char (point-max))
		     (insert prompt)))
	       (llm-chat-request backend
				 (llm-chat-bulk-replacements-with-dict  prompt (llm-chat-get-replacements-dict)))
	       ;; Restore point — the backward search moved it.
	       (goto-char end-pos)))
	    ((string= match "SYS")
	      (progn
	       (llm-chat-set-system-prompt prompt)
	       (goto-char end-pos))))
    (message "could not process prompt, no (USR) or (SYS) prompt")))

(defun llm-chat-youtube-transcript-prompt ()
  "Insert a YouTube-summary (USR) prompt into the *LLM Chat* buffer.
Prompts for a subtitle/transcript file, then appends a summarization
instruction template followed by the file's contents to the end of the
*LLM Chat* buffer (creating it if needed).  Signals a `user-error' when
the selected file does not exist (previously this failed silently)."
  (interactive)
  (let ((file (read-file-name "Select YouTube transcript file: ")))
    (unless (file-exists-p file)
      (user-error "Transcript file does not exist: %s" file))
    (with-current-buffer (get-buffer-create "*LLM Chat*")
      (goto-char (point-max))
      (insert "(USR): You are an expert summarizer tasked with creating a concise, structured summary of a YouTube video based on its subtitles. Follow these rules:

1. **Identify the Core Topic**:
   - What is the main subject of the video? (1 sentence)

2. **Key Points**:
   - Extract 3–5 central arguments, steps, or insights. Use bullet points.
   - Ignore filler words, ads, or off-topic tangents.

3. **Conclusion/Takeaway**:
   - What should the viewer remember? (1–2 sentences)

4. **Style**:
   - Neutral tone, avoid opinionated language.
   - Use plain English (no jargon unless necessary).
   - Keep the summary under 200 words.

Here are the subtitles:\n")
      ;; `insert-file-contents' leaves point *before* the inserted text,
      ;; so explicitly move to the end of the buffer afterwards.
      (insert-file-contents file)
      (goto-char (point-max)))))

(provide 'llm-chat)

;;; llm-chat.el ends here