summaryrefslogtreecommitdiff
path: root/llm-chat-gemini.el
diff options
context:
space:
mode:
Diffstat (limited to 'llm-chat-gemini.el')
-rw-r--r--llm-chat-gemini.el157
1 file changed, 157 insertions, 0 deletions
diff --git a/llm-chat-gemini.el b/llm-chat-gemini.el
new file mode 100644
index 0000000..5d4ae1b
--- /dev/null
+++ b/llm-chat-gemini.el
@@ -0,0 +1,157 @@
+;;; llm-chat-gemini.el --- Implements Gemini LLM integration -*- lexical-binding: t; -*-
+
+;; This file is not part of GNU Emacs.
+
+;;; License:
+
+;; llm-chat-gemini.el is free software: you can redistribute it and/or modify
+;; it under the terms of the GNU General Public License as published by
+;; the Free Software Foundation, either version 3 of the License, or
+;; (at your option) any later version.
+
+;; llm-chat-gemini.el is distributed in the hope that it will be useful,
+;; but WITHOUT ANY WARRANTY; without even the implied warranty of
+;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+;; GNU General Public License for more details.
+
+;; You should have received a copy of the GNU General Public License
+;; along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+;;; Code:
+
+(require 'llm-chat-api)
+
+(defvar llm-chat-gemini-models '("gemini-2.5-flash-preview-04-17" "gemini-2.5-pro-preview-03-25" "gemini-2.0-flash-lite" "gemini-1.5-flash" "gemini-1.5-flash-8b" "gemini-1.5-pro")
+  "Gemini model names recognized by this backend.
+Used to validate the user-selected model; unknown models fall back
+to a default in `llm-chat-gemini-get-model'.")
+
+;; Was `cl-defgeneric', which does not accept specialized arguments;
+;; eql dispatch requires `cl-defmethod'.
+(cl-defmethod llm-chat-resolve-model-backend ((_backend (eql 'gemini)) model)
+  "Return non-nil when MODEL is one of `llm-chat-gemini-models'.
+Dispatched when the backend symbol is `gemini'."
+  (and (member model llm-chat-gemini-models) t))
+
+(defun llm-chat-gemini-get-model ()
+  "Return the configured chat model if it is a known Gemini model.
+Falls back to \"gemini-2.0-flash-lite\" when the configured model is
+not in `llm-chat-gemini-models'.  The configured model is read once
+to avoid inconsistent results from two separate lookups."
+  (let ((model (llm-chat-get-model)))
+    (if (member model llm-chat-gemini-models)
+        model
+      "gemini-2.0-flash-lite")))
+
+(defun llm-chat-gemini-convert-history-contents ()
+  "Convert the chat history into Gemini `contents' alist pairs.
+User and system messages map to role \"user\"; assistant messages
+map to role \"model\".
+
+NOTE(review): `append' splices each message's (role . ...) and
+(parts . ...) pairs into ONE flat alist instead of building a list
+of per-message objects, so with more than one history entry the
+encoded contents collapse into a single object with duplicate keys.
+The current caller wraps this flat alist in a one-element list, which
+only works for a single message -- confirm and fix for multi-turn."
+  (let ((contents '()))
+    (dolist (history (llm-chat-get-history))
+      ;; Positional access into each history alist: the first pair's
+      ;; value is the role, the second pair's value is the text.
+      (let ((role (cdr (car history)))
+            (text (cdr (car (cdr history)))))
+
+        (cond ((or (string= role "user") (string= role "system"))
+               (setq contents (append contents `((role . "user")
+                                                 (parts . (((text . ,text))))))))
+              ((string= role "assistant")
+               (setq contents (append contents `((role . "model")
+                                                 (parts . (((text . ,text)))))))))))
+    contents))
+
+(defun llm-chat-gemini-json ()
+  "Encode the chat history as a Gemini generateContent request body.
+Builds one entry in the `contents' array per history message --
+user/system messages map to role \"user\", assistant messages to
+role \"model\" -- and attaches the configured temperature.  The
+previous version flattened every message into a single contents
+object, which broke multi-turn requests."
+  (let ((contents '()))
+    (dolist (history (llm-chat-get-history))
+      ;; Positional access into each history alist: the first pair's
+      ;; value is the role, the second pair's value is the text.
+      (let ((role (cdr (car history)))
+            (text (cdr (car (cdr history)))))
+        (cond ((or (string= role "user") (string= role "system"))
+               (push `((role . "user")
+                       (parts . (((text . ,text)))))
+                     contents))
+              ((string= role "assistant")
+               (push `((role . "model")
+                       (parts . (((text . ,text)))))
+                     contents)))))
+    ;; Encode as a vector so `json-encode' emits a JSON array
+    ;; unambiguously (a list of alists can be misread as one alist).
+    (json-encode
+     `((contents . ,(vconcat (nreverse contents)))
+       (generation_config . ((temperature . ,(llm-chat-get-temperature))))))))
+
+(defun llm-chat-gemini-headers (_backend)
+  "Return extra HTTP headers for Gemini requests: none.
+Gemini authenticates via the API key embedded in the request URL, so
+no headers are required.  _BACKEND is accepted for the headers-fn
+calling convention but ignored; the underscore prefix silences the
+byte-compiler's unused-argument warning under lexical binding."
+  nil)
+
+(defun llm-chat-make-gemini-backend ()
+  "Create an `llm-chat-backend' for Google's Gemini streaming API.
+The API key is looked up once from auth-source (previously it was
+fetched twice, once for the URL and once for :api-key).
+NOTE(review): the auth-source host is \"api.gemini.com\" while the
+endpoint is generativelanguage.googleapis.com -- confirm the intended
+host entry in the user's auth-source file."
+  (let ((api-key (auth-source-pick-first-password :host "api.gemini.com")))
+    (make-llm-chat-backend
+     :api-endpoint (concat "https://generativelanguage.googleapis.com/v1beta/models/"
+                           (llm-chat-gemini-get-model)
+                           ":streamGenerateContent?key="
+                           api-key)
+     :api-key api-key
+     :headers-fn #'llm-chat-gemini-headers
+     :json-fn #'llm-chat-gemini-json
+     :filter-fn #'llm-chat-gemini-filter)))
+
+(defun llm-chat-gemini-filter (proc string)
+  "Process filter for streaming Gemini responses.
+Accumulate STRING into `llm-chat--incomplete' until the buffered text
+looks like a complete JSON object (first \"{\" through a trailing
+\"}\"), then parse it and render any candidate text into the
+*LLM Chat* buffer.  PROC is the network process (unused here)."
+  (setq llm-chat--incomplete (concat llm-chat--incomplete string))
+  ;; Try to find a complete JSON object (simple heuristic: first { to last })
+  ;; NOTE(review): debug tap -- appends every raw chunk to
+  ;; /tmp/stream.txt; remove before release.
+  (write-region string nil "/tmp/stream.txt" t)
+  (when (and (string-match "{" llm-chat--incomplete)
+             (string-match "}[[:space:]]*\\'" llm-chat--incomplete))
+    ;; Parse from the first "{" onward, skipping any stream-array
+    ;; punctuation ("[" or ",") that precedes the object.
+    (let ((json-str (substring llm-chat--incomplete
+                               (string-match "{" llm-chat--incomplete))))
+      (condition-case err
+          (let* ((json-object (json-read-from-string json-str))
+                 ;; JSON arrays decode to elisp vectors.
+                 (candidates (alist-get 'candidates json-object))
+                 (first-candidate (if (vectorp candidates) (aref candidates 0) nil))
+                 (content (when first-candidate
+                            (alist-get 'content first-candidate)))
+                 (parts (when content (alist-get 'parts content)))
+                 ;; Only the first part's text is rendered; any
+                 ;; additional parts are dropped.
+                 (text-part (when (and parts (vectorp parts) (> (length parts) 0))
+                              (alist-get 'text (aref parts 0))))
+                 (finish-reason (when first-candidate
+                                  (alist-get 'finishReason first-candidate))))
+            (with-current-buffer (get-buffer-create "*LLM Chat*")
+              (save-excursion
+                (goto-char (point-max))
+                (when text-part
+                  ;; First chunk of a reply: open a fresh "(AI): " line.
+                  (if (not (llm-chat-get-buffer))
+                      (progn
+                        (if (not (bolp)) (insert "\n"))
+                        (insert "(AI): ")))
+                  (insert text-part)
+                  ;; Accumulate the reply streamed so far.
+                  (llm-chat-set-buffer (concat (or (llm-chat-get-buffer) "") text-part)))
+                (when (and finish-reason (string= finish-reason "STOP"))
+                  ;; Reply complete: start the next user prompt line and
+                  ;; commit the accumulated text to history.
+                  (if (not (bolp)) (insert "\n"))
+                  (insert "(USR):")
+                  (llm-chat-add-assistant-history (llm-chat-get-buffer))
+                  (llm-chat-set-buffer nil)))))
+        (error
+         (message "Gemini filter: JSON parsing error: %s" (error-message-string err))))
+      ;; This chunk was processed (or failed to parse); clear the
+      ;; accumulator for the next JSON object.
+      (setq llm-chat--incomplete ""))))
+
+;; (defun llm-chat-gemini-filter (proc string)
+;; "Filter function to handle Gemini API JSON responses."
+;; (with-current-buffer (get-buffer-create "*LLM Chat*")
+;; (save-excursion
+;; (goto-char (point-max))
+;; (write-region (concat "start-----" string "-----end\n") nil "/tmp/stream.txt" t 'append)
+;; ;; Try to extract JSON from the received string
+;; (when-let* ((start-pos (string-match "{" string))
+;; (json-string (substring string start-pos)))
+;; (condition-case err
+;; (let* ((json-object (json-read-from-string json-string))
+;; (candidates (alist-get 'candidates json-object))
+;; (first-candidate (if (vectorp candidates) (aref candidates 0) nil))
+;; (content (when first-candidate
+;; (alist-get 'content first-candidate)))
+;; (parts (when content (alist-get 'parts content)))
+;; (text-part (when (and parts (vectorp parts))
+;; (alist-get 'text (aref parts 0))))
+;; (finish-reason (when first-candidate
+;; (alist-get 'finishReason first-candidate))))
+
+;; (when text-part
+;; (if (not (llm-chat-get-buffer))
+;; (progn
+;; (if (not (bolp))
+;; (insert "\n"))
+;; (insert "(AI): ")))
+;; (insert text-part)
+;; (llm-chat-set-buffer (concat (or (llm-chat-get-buffer) "") text-part)))
+
+;; (when (and finish-reason (string= finish-reason "STOP"))
+;; (if (not (bolp))
+;; (insert "\n"))
+;; (insert "(USR):")
+;; (llm-chat-add-assistant-history (llm-chat-get-buffer))
+;; (llm-chat-set-buffer nil))))))))
+
+(defun llm-chat-gemini-print-content (string)
+  "Extract the first candidate's text from Gemini JSON response STRING.
+Return the text of the first part of the first candidate, or nil when
+the response has no candidates, content, parts, or text.  The previous
+version signaled an error on an empty candidates array."
+  (let* ((json-object (json-read-from-string string))
+         (candidates (alist-get 'candidates json-object))
+         ;; Guard: candidates may be missing or an empty vector.
+         (first-candidate (and (vectorp candidates)
+                               (> (length candidates) 0)
+                               (aref candidates 0))))
+    (when first-candidate
+      (when-let* ((content (alist-get 'content first-candidate))
+                  (parts (alist-get 'parts content))
+                  ;; Guard: parts may be missing or an empty vector.
+                  (first-part (and (vectorp parts)
+                                   (> (length parts) 0)
+                                   (aref parts 0)))
+                  (text (alist-get 'text first-part)))
+        text))))
+
+(provide 'llm-chat-gemini)
+
+;;; llm-chat-gemini.el ends here