;;; llm-chat.el --- Use llm's in emacs -*- lexical-binding: t; -*-

;; This file is not part of GNU Emacs.

;; Copyright (C) 2025 Devyn Challman

;; Author: Devyn Challman <devyn@challman.org>
;; Maintainer: Devyn Challman <devyn@challman.org>
;; Version: 0.0.1
;; Keywords: llm, ai, chat

;;; License:

;; llm-chat.el is free software: you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; llm-chat.el is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Code:

(require 'transient)
(require 'llm-chat-api)
(require 'llm-chat-replacements)
(require 'llm-chat-gemini)
(require 'llm-chat-claude)
(require 'llm-chat-openai)
(require 'llm-chat-deepseek)

(transient-define-prefix llm-chat-prompt-menu ()
  "Transient menu for inserting prompt templates into the buffer."
  ["llm-chat prompt menu"
   ("u" "user prompt" llm-chat-insert-user-prompt)
   ("y" "youtube summary" llm-chat-youtube-transcript-prompt)
   ("s" "system prompt" llm-chat-insert-system-prompt)])

(transient-define-prefix llm-chat-settings-menu ()
  "Transient menu for llm-chat runtime settings."
  ["llm-chat settings menu"
   ("t" "temperature" llm-chat-set-temperature)
   ("s" "stream" ignore)])  ; NOTE(review): streaming toggle not yet implemented

(transient-define-prefix llm-chat-model-menu ()
  "Transient menu for selecting the active model."
  ;; FIX: the group title previously read "llm-chat settings menu"
  ;; (copy-paste from `llm-chat-settings-menu').
  ["llm-chat model menu"
   ("dc" "deepseek-chat"
    (lambda () (interactive) (llm-chat-set-model "deepseek-chat")))
   ("dk" "deepseek-coder"
    (lambda () (interactive) (llm-chat-set-model "deepseek-coder")))
   ("dr" "deepseek-reasoner"
    (lambda () (interactive) (llm-chat-set-model "deepseek-reasoner")))
   ("g4" "gpt-4.1"
    (lambda () (interactive) (llm-chat-set-model "gpt-4.1")))
   ("g5" "gpt-5"
    (lambda () (interactive) (llm-chat-set-model "gpt-5")))
   ("ch" "claude-3-5-haiku-latest"
    (lambda () (interactive) (llm-chat-set-model "claude-3-5-haiku-latest")))])

(transient-define-prefix llm-chat-menu ()
  "Top-level llm-chat transient menu."
  ["llm-chat menu"
   ("p" "prompt evaluate" llm-chat-capture-prompt)
   ("i" "insert prompt" llm-chat-prompt-menu)
   ("m" "model select" llm-chat-model-menu)
   ("hs" "history save" ignore)   ; NOTE(review): not yet implemented
   ("hl" "history load" ignore)   ; NOTE(review): not yet implemented
   ("hc" "history clear" llm-chat-reset-history)
   ("s" "settings" llm-chat-settings-menu)
   ("q" "close menu" ignore)])

(defun llm-chat-resolve-backend ()
  "Return a backend object for the currently selected model.
Tries each known provider in turn against `llm-chat-get-model'.
Return nil (after messaging the user) when no provider matches."
  (cond
   ((llm-chat-resolve-model-backend 'deepseek (llm-chat-get-model))
    (llm-chat-make-deepseek-backend))
   ((llm-chat-resolve-model-backend 'gemini (llm-chat-get-model))
    (llm-chat-make-gemini-backend))
   ((llm-chat-resolve-model-backend 'claude (llm-chat-get-model))
    (llm-chat-make-claude-backend))
   ((llm-chat-resolve-model-backend 'openai (llm-chat-get-model))
    (llm-chat-make-openai-backend))
   (t
    (message "Supported model not selected cannot resolve backend")
    nil)))

(defun llm-chat-capture-prompt ()
  "Capture the prompt preceding point and dispatch it.
Search backward from point for the nearest \"(AI):\", \"(USR):\" or
\"(SYS):\" marker.  A USR prompt is sent to the resolved backend (after
applying replacements); a SYS prompt updates the system prompt.  An AI
marker, or no marker at all, aborts with a message."
  (interactive)
  (if-let* ((end-pos (point))
            ;; `re-search-backward' moves point to the start of the match.
            (search (re-search-backward "(\\(AI\\|USR\\|SYS\\)):" nil t))
            ;; nil (falls through to the error message) when the nearest
            ;; marker is "(AI):".
            (match (car (member (match-string 1) '("USR" "SYS"))))
            ;; +6 skips past "(USR):" / "(SYS):" (both 6 chars); any
            ;; space after the colon remains part of the prompt —
            ;; presumably intentional, TODO confirm.
            (start-pos (+ (point) 6))
            (prompt (buffer-substring-no-properties start-pos end-pos)))
      (cond
       ((string= match "USR")
        (when-let (backend (llm-chat-resolve-backend))
          ;; Mirror the prompt into *LLM Chat* when invoked from another
          ;; buffer, so the conversation log stays complete.
          (if (not (string= (buffer-name) "*LLM Chat*"))
              (with-current-buffer (get-buffer-create "*LLM Chat*")
                (goto-char (point-max))
                (insert prompt)))
          (llm-chat-request
           backend
           (llm-chat-bulk-replacements-with-dict
            prompt (llm-chat-get-replacements-dict)))
          ;; Restore point; the backward search moved it.
          (goto-char end-pos)))
       ((string= match "SYS")
        (llm-chat-set-system-prompt prompt)
        (goto-char end-pos)))
    (message "could not process prompt, no (USR) or (SYS) prompt")))

(defun llm-chat-youtube-transcript-prompt ()
  "Insert a YouTube-summary prompt plus a transcript into *LLM Chat*.
Prompt for a subtitle/transcript file; when it exists, append a
summarization instruction followed by the file's contents to the
*LLM Chat* buffer."
  (interactive)
  (let ((file (read-file-name "Select YouTube transcript file: ")))
    (when (file-exists-p file)
      (with-current-buffer (get-buffer-create "*LLM Chat*")
        (goto-char (point-max))
        ;; Prompt text preserved byte-for-byte (including the embedded
        ;; newline after "expert ").
        (insert "(USR): You are an expert 
summarizer tasked with creating a concise, structured summary of a YouTube video based on its subtitles. Follow these rules: 1. **Identify the Core Topic**: - What is the main subject of the video? (1 sentence) 2. **Key Points**: - Extract 3–5 central arguments, steps, or insights. Use bullet points. - Ignore filler words, ads, or off-topic tangents. 3. **Conclusion/Takeaway**: - What should the viewer remember? (1–2 sentences) 4. **Style**: - Neutral tone, avoid opinionated language. - Use plain English (no jargon unless necessary). - Keep the summary under 200 words. Here are the subtitles:\n")
        (insert-file-contents file)
        (goto-char (point-max))))))

(provide 'llm-chat)
;;; llm-chat.el ends here