;;; llm-chat-api.el --- Common interface to LLM chat backends -*- lexical-binding: t; -*-
;; This file is not part of GNU Emacs.
;;; Code:
(defvar llm-chat-system-prompt "You are a helpful bot. You make concise accurate responses. You only give the choice you think is best - not multiple examples. You are a professional at creating custom operating systems. Excelling at performance and optimization for running programs on low spec devices that still provide the best user experience"
  "System prompt sent as the first message of every conversation.")
(defvar llm-chat-history `(((role . "system") (content . ,llm-chat-system-prompt)))
  "Conversation history: a list of ((role . STRING) (content . STRING)) alists.
The first entry is always the system prompt.")
(defvar llm-chat-model nil
  "Model name to request from the backend, or nil if not yet chosen.")
(defvar llm-chat-temperature 0.3
  "Sampling temperature; `llm-chat-set-temperature' keeps it within 0.0-1.0.")
(defvar llm-chat-stream t
  "Non-nil means request streaming responses.")
(defvar llm-chat-buffer nil
  "Text of the assistant reply currently being streamed, or nil between replies.")
(defvar llm-chat--incomplete ""
  "Partial server-sent-event data carried over between process-filter calls.")
(defun llm-chat-get-system-prompt ()
  "Return the current system prompt string."
  llm-chat-system-prompt)
(defun llm-chat-set-system-prompt (prompt)
  "Set the system prompt to PROMPT.
Updates both `llm-chat-system-prompt' and the system entry already
stored at the head of `llm-chat-history'."
  (let ((system-entry (car llm-chat-history)))
    (setcdr (assoc 'content system-entry) prompt))
  (setq llm-chat-system-prompt prompt))
(defun llm-chat-get-history ()
  "Return the full conversation history (list of message alists)."
  llm-chat-history)
(defun llm-chat-add-history (entry)
  "Append ENTRY, a list of message alists, to `llm-chat-history'."
  (setq llm-chat-history (append llm-chat-history entry)))
(defun llm-chat-add-user-history (prompt)
  "Record PROMPT as a user message in `llm-chat-history'."
  (llm-chat-add-history `(((role . "user") (content . ,prompt)))))
(defun llm-chat-add-assistant-history (prompt)
  "Record PROMPT as an assistant message in `llm-chat-history'."
  (llm-chat-add-history `(((role . "assistant") (content . ,prompt)))))
(defun llm-chat-reset-history ()
  "Discard the conversation, keeping only the current system prompt."
  (interactive)
  (setq llm-chat-history
        (list (list (cons 'role "system")
                    (cons 'content (llm-chat-get-system-prompt)))))
  (message "history has been reset to current system prompt"))
(defun llm-chat-save-history (path)
  "Write `llm-chat-history' to the file at PATH as a readable Lisp form.
The saved file can be restored later with `llm-chat-load-history'.
The original function was an empty stub."
  (with-temp-file path
    ;; Disable print truncation so long conversations round-trip intact.
    (let ((print-length nil)
          (print-level nil))
      (prin1 llm-chat-history (current-buffer)))))
(defun llm-chat-load-history (path)
  "Replace `llm-chat-history' with the history saved at PATH.
PATH must contain a single Lisp form as written by
`llm-chat-save-history'.  The original function was an empty stub."
  (with-temp-buffer
    (insert-file-contents path)
    (setq llm-chat-history (read (current-buffer)))))
(defun llm-chat-get-model ()
  "Return the currently selected model name, or nil."
  llm-chat-model)
(defun llm-chat-set-model (model)
  "Set `llm-chat-model' to MODEL and report the change.
MODEL is passed to `message' as an argument, not as the format string,
so model names containing `%' cannot break or be misrendered by the
formatter (the original used (message (concat ...)))."
  (setq llm-chat-model model)
  (message "model set to %s" llm-chat-model))
(defun llm-chat-get-temperature ()
  "Return the current sampling temperature."
  llm-chat-temperature)
(defun llm-chat-set-temperature (temp)
  "Set `llm-chat-temperature' to TEMP if it lies within 0.0-1.0.
Out-of-range values are rejected with a message and the current
temperature is left unchanged."
  (interactive "nenter a value between 0.0 and 1.0: ")
  (if (or (< temp 0.0) (> temp 1.0))
      (message "temperature must be between 0.0 and 1.0")
    (setq llm-chat-temperature temp)))
(defun llm-chat-get-stream? ()
  "Return non-nil if streaming responses are enabled."
  llm-chat-stream)
(defun llm-chat-set-stream (stream?)
  "Set `llm-chat-stream' to STREAM? (non-nil enables streaming)."
  (setq llm-chat-stream stream?))
(defun llm-chat-get-buffer ()
  "Return the assistant reply accumulated so far, or nil between replies."
  llm-chat-buffer)
(defun llm-chat-set-buffer (buffer)
  "Set `llm-chat-buffer' to BUFFER (the in-progress assistant reply text)."
  (setq llm-chat-buffer buffer))
(defun llm-chat-insert-user-prompt ()
  "Insert the user-turn marker at point."
  (interactive)
  (insert "(USR):"))
(defun llm-chat-insert-system-prompt ()
  "Insert the system-turn marker at point."
  (interactive)
  (insert "(SYS):"))
;; This file uses `cl-defstruct', `cl-defgeneric' and `cl-defmethod'
;; but never required cl-lib, so it failed to byte-compile or load in
;; a clean session.  Require it explicitly.
(require 'cl-lib)

(cl-defstruct llm-chat-backend
  "Description of one LLM provider endpoint.
Slots hold the connection details plus the adapter functions that map
this generic client onto the provider's wire format."
  api-endpoint ; URL the request is POSTed to
  api-key      ; credential, consumed by HEADERS-FN
  headers-fn   ; (fn BACKEND) -> list of extra curl arguments
  json-fn      ; (fn) -> request body as a JSON string
  filter-fn)   ; process filter applied to the streamed response
(cl-defgeneric llm-chat-resolve-model-backend (backend model)
  "Resolve MODEL against BACKEND.
No default method is provided; each concrete backend must implement it.")
(cl-defgeneric llm-chat-request (backend prompt)
  "Send PROMPT to the LLM service described by BACKEND.")
(cl-defmethod llm-chat-request ((backend llm-chat-backend) prompt)
  "POST PROMPT to BACKEND's endpoint via curl and stream the reply.

PROMPT is recorded in `llm-chat-history', the *LLM Chat* buffer is put
in `markdown-mode' if needed, and the request body produced by the
backend's json-fn is written to a temporary file handed to curl with
-d @FILE.  The backend's filter-fn receives the streamed output.

Fix over the original: the temp file (which contains the whole
conversation as JSON) was never deleted, leaking one file per request;
a process sentinel now removes it when curl exits."
  (llm-chat-set-buffer nil)
  (llm-chat-add-user-history prompt)
  (with-current-buffer (get-buffer-create "*LLM Chat*")
    (unless (derived-mode-p 'markdown-mode)
      (markdown-mode)))
  (let ((temp-file (make-temp-file "llm-chat-request-" nil ".json"))
        (headers-fn (llm-chat-backend-headers-fn backend))
        (json-fn (llm-chat-backend-json-fn backend))
        (filter-fn (llm-chat-backend-filter-fn backend)))
    (with-temp-file temp-file
      (insert (funcall json-fn)))
    (make-process
     :name "llm-chat-stream"
     :command (append (list "curl" "--no-buffer" "-X" "POST"
                            "-H" "Content-Type: application/json"
                            "-d" (concat "@" temp-file)
                            (llm-chat-backend-api-endpoint backend))
                      (funcall headers-fn backend))
     :filter filter-fn
     ;; Clean up the request payload once the curl process is done.
     :sentinel (lambda (proc _event)
                 (unless (process-live-p proc)
                   (ignore-errors (delete-file temp-file)))))))
(defun llm-chat-data-filter (_proc string backend-finished-p-fn backend-parse-content-fn)
  "Generic process filter for server-sent-event (SSE) chat streams.

STRING is one chunk of curl output (_PROC is unused but required by the
process-filter calling convention).  The chunk is prepended with any
partial event left over from the previous call, split into complete
\"data: ...\" events, and each payload is handled in order: when
BACKEND-FINISHED-P-FN returns non-nil, the accumulated assistant reply
is committed to the history and a fresh \"(USR):\" marker is inserted;
otherwise BACKEND-PARSE-CONTENT-FN extracts the text delta, which is
appended to the *LLM Chat* buffer and to the pending reply.

Fix over the original: removed a leftover debug `write-region' that
dumped the entire stream to the fixed, world-readable path
/tmp/stream.txt on every event — a privacy leak and repeated I/O."
  (with-current-buffer (get-buffer-create "*LLM Chat*")
    (save-excursion
      (goto-char (point-max))
      (let ((data (concat llm-chat--incomplete string))
            (start 0)
            matches)
        ;; Collect every complete event; keep any trailing partial
        ;; event in `llm-chat--incomplete' for the next call.
        (while (string-match "data: \\(.*\\)\n\n" data start)
          (push (match-string 1 data) matches)
          (setq start (match-end 0)))
        (setq llm-chat--incomplete (substring data start))
        (dolist (match (nreverse matches))
          (if (funcall backend-finished-p-fn match)
              (progn
                (unless (bolp) (insert "\n"))
                (insert "(USR):")
                (llm-chat-add-assistant-history (llm-chat-get-buffer))
                (llm-chat-set-buffer nil)
                (setq llm-chat--incomplete ""))
            (when-let ((content (funcall backend-parse-content-fn match)))
              ;; First delta of a reply: start a new "(AI): " line.
              (unless (llm-chat-get-buffer)
                (unless (bolp) (insert "\n"))
                (insert "(AI): "))
              (insert content)
              (llm-chat-set-buffer
               (concat (llm-chat-get-buffer) content)))))))))
(provide 'llm-chat-api)
;;; llm-chat-api.el ends here