;;; gptel-ollama.el --- Ollama support for gptel -*- lexical-binding: t; -*-

;; Copyright (C) 2023 Karthik Chikmagalur

;; Author: Karthik Chikmagalur <[email protected]>
;; Keywords: hypermedia

;; This program is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program. If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:

;; This file adds support for the Ollama LLM API to gptel
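;;
;; A minimal, illustrative sketch of registering this backend (the host
;; and model name below are assumptions, adjust them for your setup):
;;
;; (gptel-make-ollama "Ollama"
;;   :host "localhost:11434"
;;   :models '("mistral:latest")
;;   :stream t)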
;;; Code:

(require 'gptel)
(require 'cl-generic)

(declare-function json-read "json" ())
(defvar json-object-type)

;;; Ollama
(cl-defstruct (gptel-ollama (:constructor gptel--make-ollama)
                            (:copier nil)
                            (:include gptel-backend)))

(defvar-local gptel--ollama-context nil
  "Context for ollama conversations.

This variable holds the context array for conversations with
Ollama models.")
(cl-defmethod gptel-curl--parse-stream ((_backend gptel-ollama) info)
  "Parse response stream from the Ollama API.

INFO is the async request plist.  Return the response text
accumulated so far, and save the conversation context in the
request buffer when the stream reports completion."
  (when (bobp)
    (re-search-forward "^{")
    (forward-line 0))
  (let* ((content-strs) (content) (pt (point)))
    (condition-case nil
        (while (setq content (gptel--json-read))
          (setq pt (point))
          (let ((done (map-elt content :done))
                (response (map-elt content :response)))
            (push response content-strs)
            (unless (eq done :json-false)
              (with-current-buffer (plist-get info :buffer)
                (setq gptel--ollama-context (map-elt content :context)))
              (goto-char (point-max)))))
      (error (goto-char pt)))
    (apply #'concat (nreverse content-strs))))
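
;; For reference, a hedged sketch of the data these parsers handle: the
;; generate endpoint emits one JSON object per line, roughly of the
;; form below (values are illustrative):
;;
;;   {"response": "Hello", "done": false}
;;   {"response": " there", "done": false}
;;   {"response": "", "done": true, "context": [1, 2, 3]}
;;
;; The streaming parser above concatenates the "response" fields and
;; stores the final "context" array; the non-streaming parser below
;; receives a single such object with "done" true.
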
(cl-defmethod gptel--parse-response ((_backend gptel-ollama) response info)
  "Parse a non-streaming Ollama RESPONSE with request INFO.

Save the conversation context in the request buffer and return
the response text."
  (when-let ((context (map-elt response :context)))
    (with-current-buffer (plist-get info :buffer)
      (setq gptel--ollama-context context)))
  (map-elt response :response))

(cl-defmethod gptel--request-data ((_backend gptel-ollama) prompts)
  "JSON encode PROMPTS for Ollama."
  (let ((prompts-plist
         `(:model ,gptel-model
           ,@prompts
           :stream ,(or (and gptel-stream gptel-use-curl
                             (gptel-backend-stream gptel-backend))
                        :json-false))))
    (when gptel--ollama-context
      (plist-put prompts-plist :context gptel--ollama-context))
    prompts-plist))
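
;; A rough sketch of the plist built above for a buffer holding a system
;; message and one user prompt (all values illustrative):
;;
;;   (:model "mistral:latest"
;;    :system "You are a helpful assistant."
;;    :prompt "Hello there"
;;    :stream :json-false
;;    :context [1 2 3])   ; present only after the first exchange
;;
;; gptel serializes this plist to JSON before sending it to Ollama.
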
(cl-defmethod gptel--parse-buffer ((_backend gptel-ollama) &optional _max-entries)
  "Parse the current buffer into a prompt plist for Ollama.

Return a plist containing the system message and the most recent
user prompt in the buffer."
  (let ((prompts)
        (prop (text-property-search-backward
               'gptel 'response
               (when (get-char-property (max (point-min) (1- (point)))
                                        'gptel)
                 t))))
    (if (and (prop-match-p prop)
             (prop-match-value prop))
        (user-error "No user prompt found!")
      (setq prompts (list
                     :system gptel--system-message
                     :prompt
                     (if (prop-match-p prop)
                         (string-trim
                          (buffer-substring-no-properties (prop-match-beginning prop)
                                                          (prop-match-end prop))
                          (format "[\t\r\n ]*\\(?:%s\\)?[\t\r\n ]*"
                                  (regexp-quote (gptel-prompt-prefix-string)))
                          (format "[\t\r\n ]*\\(?:%s\\)?[\t\r\n ]*"
                                  (regexp-quote (gptel-response-prefix-string))))
                       "")))
      prompts)))
;;;###autoload
(cl-defun gptel-make-ollama
    (name &key curl-args header key models stream
          (host "localhost:11434")
          (protocol "http")
          (endpoint "/api/generate"))
  "Register an Ollama backend for gptel with NAME.

Keyword arguments:

CURL-ARGS (optional) is a list of additional Curl arguments.

HOST is where Ollama runs (with port), defaults to localhost:11434.

MODELS is a list of available model names.

STREAM is a boolean to toggle streaming responses, defaults to
false.

PROTOCOL (optional) specifies the protocol, http by default.

ENDPOINT (optional) is the API endpoint for completions, defaults to
\"/api/generate\".

HEADER (optional) is for additional headers to send with each
request. It should be an alist or a function that returns an
alist, like:
((\"Content-Type\" . \"application/json\"))

KEY (optional) is a variable whose value is the API key, or a
function that returns the key. This is typically not required
for local models like Ollama.

Example:
-------

(gptel-make-ollama
 \"Ollama\"
 :host \"localhost:11434\"
 :models \\='(\"mistral:latest\")
 :stream t)"
  (declare (indent 1))
  (let ((backend (gptel--make-ollama
                  :curl-args curl-args
                  :name name
                  :host host
                  :header header
                  :key key
                  :models models
                  :protocol protocol
                  :endpoint endpoint
                  :stream stream
                  :url (if protocol
                           (concat protocol "://" host endpoint)
                         (concat host endpoint)))))
    (prog1 backend
      (setf (alist-get name gptel--known-backends
                       nil nil #'equal)
            backend))))
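
;; A hedged usage sketch (host and model name are illustrative
;; assumptions): the backend returned above can be made the default by
;; setting `gptel-backend' and `gptel-model', for example:
;;
;; (setq-default gptel-backend (gptel-make-ollama "Ollama"
;;                               :host "localhost:11434"
;;                               :models '("mistral:latest")
;;                               :stream t)
;;               gptel-model "mistral:latest")
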
(provide 'gptel-ollama)
;;; gptel-ollama.el ends here