Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion lua/gp/config.lua
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ local config = {
},
ollama = {
disable = true,
endpoint = "http://localhost:11434/v1/chat/completions",
endpoint = "http://localhost:11434/api/chat",
secret = "dummy_secret",
},
lmstudio = {
Expand Down Expand Up @@ -196,6 +196,19 @@ local config = {
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
-- Chat agent entry for a locally served Ollama model (Qwen3 8B).
-- chat = true / command = false: offered for chat, not for command use.
{
provider = "ollama",
name = "ChatQwen3-8B",
chat = true,
command = false,
-- string with model name or table with model name and parameters
model = {
model = "qwen3:8b",
think = false, -- toggle thinking mode for Ollama's thinking models
},
-- system prompt (use this to specify the persona/role of the AI)
system_prompt = "You are a general AI assistant.",
},
{
provider = "lmstudio",
name = "ChatLMStudio",
Expand Down
46 changes: 46 additions & 0 deletions lua/gp/dispatcher.lua
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,41 @@ D.prepare_payload = function(messages, model, provider)
return payload
end

-- Ollama's native /api/chat request shape: sampling parameters are
-- nested under an "options" sub-table instead of sitting at the
-- payload top level, so this provider gets its own payload builder.
if provider == "ollama" then
local payload = {
model = model.model,
stream = true,
messages = messages,
}

-- Forward "think" only when the user set it explicitly (true OR false);
-- omitting it lets the server apply its own default for thinking models.
if model.think ~= nil then
payload.think = model.think
end

-- Collect sampler settings, clamping temperature to [0, 2] and
-- top_p/min_p to [0, 1]; num_ctx and top_k are passed through as-is.
local options = {}
if model.temperature then
options.temperature = math.max(0, math.min(2, model.temperature))
end
if model.top_p then
options.top_p = math.max(0, math.min(1, model.top_p))
end
if model.min_p then
options.min_p = math.max(0, math.min(1, model.min_p))
end
if model.num_ctx then
options.num_ctx = model.num_ctx
end
if model.top_k then
options.top_k = model.top_k
end

-- Attach "options" only when at least one field was set, keeping the
-- request body minimal. next() distinguishes {} from a populated table.
if next(options) then
payload.options = options
end

return payload
end

local output = {
model = model.model,
stream = true,
Expand Down Expand Up @@ -270,6 +305,15 @@ local query = function(buf, provider, payload, handler, on_exit, callback)
end
end

-- Ollama streams newline-delimited JSON: one complete object per line.
-- Cheap substring pre-checks skip lines without message content before
-- paying for a full JSON decode; pcall guards against malformed or
-- partially received lines so a bad chunk never crashes the handler.
if qt.provider == "ollama" then
if line:match('"message":') and line:match('"content":') then
local success, decoded = pcall(vim.json.decode, line)
if success and decoded.message and decoded.message.content then
content = decoded.message.content
end
end
end


if content and type(content) == "string" then
qt.response = qt.response .. content
Expand Down Expand Up @@ -391,6 +435,8 @@ local query = function(buf, provider, payload, handler, on_exit, callback)
"api-key: " .. bearer,
}
endpoint = render.template_replace(endpoint, "{{model}}", payload.model)
elseif provider == "ollama" then
-- Local Ollama server needs no auth header (the configured secret is a
-- dummy), so send no extra curl header arguments at all.
headers = {}
else -- default to openai compatible headers
headers = {
"-H",
Expand Down