Commit 49f28af

add support for ollama's direct /api/chat processing
Parent: b6cde06

File tree

lua/gp/config.lua
lua/gp/dispatcher.lua

2 files changed: +13 -1 lines changed

lua/gp/config.lua

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ local config = {
 	},
 	ollama = {
 		disable = true,
-		endpoint = "http://localhost:11434/v1/chat/completions",
+		endpoint = "http://localhost:11434/api/chat",
 		secret = "dummy_secret",
 	},
 	lmstudio = {
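
Note: the new default points gp.nvim at Ollama's native chat endpoint instead of Ollama's OpenAI-compatible shim at /v1/chat/completions. A minimal sketch of enabling the provider against the new default in a user config; the require("gp").setup call and the providers table are assumptions about the surrounding plugin configuration (they are not part of this diff), and the host/port are simply Ollama's defaults:

-- Sketch only: setup() and the providers key are assumed, not shown in this commit.
require("gp").setup({
	providers = {
		ollama = {
			disable = false,
			endpoint = "http://localhost:11434/api/chat",
			secret = "dummy_secret", -- a local Ollama server does not check this
		},
	},
})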

lua/gp/dispatcher.lua

Lines changed: 12 additions & 0 deletions
@@ -270,6 +270,15 @@ local query = function(buf, provider, payload, handler, on_exit, callback)
 			end
 		end
 
+		if qt.provider == "ollama" then
+			if line:match('"message":') and line:match('"content":') then
+				local success, decoded = pcall(vim.json.decode, line)
+				if success and decoded.message and decoded.message.content then
+					content = decoded.message.content
+				end
+			end
+		end
+
 
 		if content and type(content) == "string" then
 			qt.response = qt.response .. content
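
The new branch handles Ollama's native streaming format: /api/chat emits newline-delimited JSON objects whose text sits under message.content (with a final object flagged "done": true), rather than the OpenAI-style delta/content chunks handled earlier in this function. A standalone sketch of the same extraction, runnable inside Neovim (e.g. with nvim -l) where vim.json is available; the sample line is illustrative of Ollama's documented response shape, not captured from a real server:

-- Hand-written sample of one streamed /api/chat line (illustrative only).
local line = '{"model":"llama3","message":{"role":"assistant","content":"Hel"},"done":false}'

local content
if line:match('"message":') and line:match('"content":') then
	-- pcall guards against partial or malformed chunks in the stream
	local ok, decoded = pcall(vim.json.decode, line)
	if ok and decoded.message and decoded.message.content then
		content = decoded.message.content
	end
end

assert(content == "Hel")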
@@ -391,6 +400,9 @@ local query = function(buf, provider, payload, handler, on_exit, callback)
 			"api-key: " .. bearer,
 		}
 		endpoint = render.template_replace(endpoint, "{{model}}", payload.model)
+	elseif provider == "ollama" then
+		-- Ollama local API typically doesn't require authentication
+		headers = {}
 	else -- default to openai compatible headers
 		headers = {
 			"-H",
