summaryrefslogtreecommitdiff
path: root/llm-chat-api.el
diff options
context:
space:
mode:
Diffstat (limited to 'llm-chat-api.el')
-rw-r--r--llm-chat-api.el142
1 file changed, 142 insertions, 0 deletions
diff --git a/llm-chat-api.el b/llm-chat-api.el
new file mode 100644
index 0000000..96082f8
--- /dev/null
+++ b/llm-chat-api.el
@@ -0,0 +1,142 @@
+;;; llm-chat-api.el --- Common interfaces for LLM chat backends -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; Code:
+
+;; Mutable session state.  All of these variables are file-global and
+;; shared by every request; there is no per-buffer or per-session isolation.
+(defvar llm-chat-system-prompt "You are a helpful bot. You make concise accurate responses. You only give the choice you think is best - not multiple examples. You are a professional at creating custom operating systems. Excelling at performance and optimization for running programs on low spec devices that still provide the best user experience"
+  "System prompt placed as the first entry of `llm-chat-history'.")
+(defvar llm-chat-history `(((role . "system") (content . ,llm-chat-system-prompt)))
+  "Conversation history: a list of ((role . STRING) (content . STRING)) alists.")
+(defvar llm-chat-model nil
+  "Name of the model to request, or nil if none has been chosen yet.")
+(defvar llm-chat-temperature 0.3
+  "Sampling temperature; `llm-chat-set-temperature' keeps it within [0.0, 1.0].")
+(defvar llm-chat-stream t
+  "Non-nil means streaming responses are requested.")
+(defvar llm-chat-buffer nil
+  "Accumulated assistant text for the in-flight response, or nil when idle.")
+(defvar llm-chat--incomplete ""
+  "Partial SSE payload carried over between process-filter invocations.")
+
+(defun llm-chat-get-system-prompt ()
+  "Return the current system prompt string."
+  llm-chat-system-prompt)
+
+(defun llm-chat-set-system-prompt (prompt)
+  "Set the system prompt to PROMPT.
+Updates both `llm-chat-system-prompt' and the system entry at the
+head of `llm-chat-history', so the change affects the ongoing
+conversation as well as future resets.  Return PROMPT."
+  ;; `assq' is the idiomatic lookup for symbol keys; the original
+  ;; `(setf (cdr (assoc ...)))' signalled an error whenever the head of
+  ;; the history lacked a `content' entry (e.g. after external mutation).
+  (let ((cell (assq 'content (car llm-chat-history))))
+    (if cell
+        (setcdr cell prompt)
+      ;; No system entry found; restore one at the front of the history.
+      (push `((role . "system") (content . ,prompt)) llm-chat-history)))
+  (setq llm-chat-system-prompt prompt))
+
+(defun llm-chat-get-history ()
+  "Return the full conversation history (list of message alists)."
+  llm-chat-history)
+
+(defun llm-chat-add-history (entry)
+  "Append ENTRY to the end of `llm-chat-history'.
+ENTRY must be a LIST of message alists -- it is spliced in with
+`append', not consed on -- see `llm-chat-add-user-history' for the
+expected shape."
+  (setq llm-chat-history (append llm-chat-history entry)))
+
+(defun llm-chat-add-user-history (prompt)
+  "Record PROMPT as a user message at the end of the history."
+  (let ((message-alist (list (cons 'role "user")
+                             (cons 'content prompt))))
+    (llm-chat-add-history (list message-alist))))
+
+(defun llm-chat-add-assistant-history (prompt)
+  "Record PROMPT as an assistant message at the end of the history."
+  (let ((message-alist (list (cons 'role "assistant")
+                             (cons 'content prompt))))
+    (llm-chat-add-history (list message-alist))))
+
+(defun llm-chat-reset-history ()
+  "Discard the conversation, keeping only the current system prompt."
+  (interactive)
+  (let ((system-entry (list (cons 'role "system")
+                            (cons 'content (llm-chat-get-system-prompt)))))
+    (setq llm-chat-history (list system-entry)))
+  (message "history has been reset to current system prompt"))
+
+(defun llm-chat-save-history (path)
+  "Persist `llm-chat-history' to the file at PATH as printed Lisp data.
+This was an empty stub; it now writes the history so that it can be
+restored later with `llm-chat-load-history'."
+  (with-temp-file path
+    ;; Disable print truncation so the whole history round-trips.
+    (let ((print-length nil)
+          (print-level nil))
+      (prin1 llm-chat-history (current-buffer)))))
+
+(defun llm-chat-load-history (path)
+  "Restore `llm-chat-history' from PATH.
+PATH should contain printed Lisp data as produced by
+`llm-chat-save-history'.  Signals an error if the file is missing or
+does not contain a list.  This was an empty stub."
+  (with-temp-buffer
+    (insert-file-contents path)
+    (let ((data (read (current-buffer))))
+      (unless (listp data)
+        (error "Invalid history file: %s" path))
+      (setq llm-chat-history data))))
+
+(defun llm-chat-get-model ()
+  "Return the currently selected model name, or nil."
+  llm-chat-model)
+
+(defun llm-chat-set-model (model)
+  "Set the active model to MODEL and echo the choice."
+  (setq llm-chat-model model)
+  ;; Use an explicit format string: `message' treats its first argument
+  ;; as a format spec, so a model name containing `%' would previously
+  ;; have signalled an error (and a nil MODEL would have broken `concat').
+  (message "model set to %s" llm-chat-model))
+
+(defun llm-chat-get-temperature ()
+  "Return the current sampling temperature (a float in [0.0, 1.0])."
+  llm-chat-temperature)
+
+(defun llm-chat-set-temperature (temp)
+  "Set the sampling temperature to TEMP when it lies in [0.0, 1.0].
+Out-of-range values are rejected with an echo-area message."
+  (interactive "nenter a value between 0.0 and 1.0: ")
+  (if (or (< temp 0.0) (> temp 1.0))
+      (message "temperature must be between 0.0 and 1.0")
+    (setq llm-chat-temperature temp)))
+
+(defun llm-chat-get-stream? ()
+  "Return non-nil when streaming responses are enabled."
+  llm-chat-stream)
+
+(defun llm-chat-set-stream (stream?)
+  "Enable streaming responses when STREAM? is non-nil, disable otherwise."
+  (setq llm-chat-stream stream?))
+
+(defun llm-chat-get-buffer ()
+  "Return the accumulated assistant text for the in-flight reply, or nil."
+  llm-chat-buffer)
+
+(defun llm-chat-set-buffer (buffer)
+  "Set the accumulated assistant text to BUFFER (a string or nil)."
+  (setq llm-chat-buffer buffer))
+
+(defun llm-chat-insert-user-prompt ()
+  "Insert the \"(USR):\" marker at point (matches the chat transcript format)."
+  (interactive)
+  (insert "(USR):"))
+
+(defun llm-chat-insert-system-prompt ()
+  "Insert the \"(SYS):\" marker at point (matches the chat transcript format)."
+  (interactive)
+  (insert "(SYS):"))
+
+;; `cl-defstruct', `cl-defgeneric' and `cl-defmethod' come from cl-lib,
+;; which is not preloaded; require it so this file loads and
+;; byte-compiles cleanly on a fresh Emacs.
+(require 'cl-lib)
+
+(cl-defstruct llm-chat-backend
+  "Description of one LLM provider endpoint."
+  api-endpoint   ; URL string the request is POSTed to
+  api-key        ; credential string, consumed by HEADERS-FN
+  headers-fn     ; (fn BACKEND) -> list of extra curl arguments
+  json-fn        ; (fn) -> JSON request-body string
+  filter-fn)     ; process filter receiving curl's streamed output
+
+(cl-defgeneric llm-chat-resolve-model-backend (backend model)
+  "Return BACKEND configured for MODEL.
+Provider files supply methods; no default implementation exists.")
+
+(cl-defgeneric llm-chat-request (backend prompt)
+  "Send PROMPT to BACKEND's API, streaming the reply into *LLM Chat*.")
+
+(cl-defmethod llm-chat-request ((backend llm-chat-backend) prompt)
+  "Send PROMPT to BACKEND via curl, streaming the response.
+The request body produced by the backend's JSON-FN is written to a
+temporary file and POSTed with curl; FILTER-FN consumes the streamed
+output.  The temp file is deleted when the curl process exits."
+  (llm-chat-set-buffer nil)
+  (llm-chat-add-user-history prompt)
+  (with-current-buffer (get-buffer-create "*LLM Chat*")
+    ;; markdown-mode is a third-party package; degrade gracefully
+    ;; instead of erroring when it is not installed.
+    (when (and (fboundp 'markdown-mode)
+               (not (derived-mode-p 'markdown-mode)))
+      (markdown-mode)))
+  (let ((temp-file (make-temp-file "llm-chat-request-" nil ".json"))
+        (headers-fn (llm-chat-backend-headers-fn backend))
+        (json-fn (llm-chat-backend-json-fn backend))
+        (filter-fn (llm-chat-backend-filter-fn backend)))
+    (with-temp-file temp-file
+      (insert (funcall json-fn)))
+    (make-process
+     :name "llm-chat-stream"
+     ;; Options (including backend headers) come before the URL; the
+     ;; original appended header args after the endpoint, which curl
+     ;; tolerates but is fragile.
+     :command (append (list "curl" "--no-buffer" "-X" "POST"
+                            "-H" "Content-Type: application/json"
+                            "-d" (concat "@" temp-file))
+                      (funcall headers-fn backend)
+                      (list (llm-chat-backend-api-endpoint backend)))
+     :filter filter-fn
+     ;; Delete the request temp file once curl finishes; previously it
+     ;; leaked one file per request.
+     :sentinel (lambda (_proc _event)
+                 (ignore-errors (delete-file temp-file))))))
+
+(defun llm-chat-data-filter (proc string backend-finished-p-fn backend-parse-content-fn)
+  "Parse SSE \"data: ...\" events from STRING into the *LLM Chat* buffer.
+PROC is the curl process (unused; kept for the process-filter calling
+convention).  Incomplete events are carried across calls in
+`llm-chat--incomplete'.  BACKEND-FINISHED-P-FN decides whether an
+event payload terminates the reply; BACKEND-PARSE-CONTENT-FN extracts
+the text chunk (or nil) from a payload."
+  (ignore proc)
+  (with-current-buffer (get-buffer-create "*LLM Chat*")
+    (save-excursion
+      (goto-char (point-max))
+      (let ((data (concat llm-chat--incomplete string))
+            (start 0)
+            matches)
+        ;; Collect every complete "data: ...\n\n" event; the unfinished
+        ;; tail is saved for the next filter call.  (A leftover debug
+        ;; `write-region' to /tmp/stream.txt was removed here -- it ran
+        ;; on every event and grew the file without bound.)
+        (while (string-match "data: \\(.*\\)\n\n" data start)
+          (push (match-string 1 data) matches)
+          (setq start (match-end 0)))
+        (setq llm-chat--incomplete (substring data start))
+        (dolist (match (nreverse matches))
+          (if (funcall backend-finished-p-fn match)
+              ;; End of reply: close the transcript entry and record it.
+              (progn
+                (unless (bolp) (insert "\n"))
+                (insert "(USR):")
+                (llm-chat-add-assistant-history (llm-chat-get-buffer))
+                (llm-chat-set-buffer nil)
+                (setq llm-chat--incomplete ""))
+            (when-let ((content (funcall backend-parse-content-fn match)))
+              ;; First chunk of a reply starts a new "(AI): " line.
+              (unless (llm-chat-get-buffer)
+                (unless (bolp) (insert "\n"))
+                (insert "(AI): "))
+              (insert content)
+              (llm-chat-set-buffer
+               (concat (llm-chat-get-buffer) content)))))))))
+
+(provide 'llm-chat-api)  ; feature name matches the file name
+
+;;; llm-chat-api.el ends here