diff --git a/lua/gp/dispatcher.lua b/lua/gp/dispatcher.lua
index d4214aa..cebaf30 100644
--- a/lua/gp/dispatcher.lua
+++ b/lua/gp/dispatcher.lua
@@ -169,7 +169,7 @@ D.prepare_payload = function(messages, model, provider)
 		model = model.model,
 		stream = true,
 		messages = messages,
-		max_tokens = model.max_tokens or 4096,
+		max_completion_tokens = model.max_completion_tokens or 4096,
 		temperature = math.max(0, math.min(2, model.temperature or 1)),
 		top_p = math.max(0, math.min(1, model.top_p or 1)),
 	}
@@ -185,7 +185,7 @@ D.prepare_payload = function(messages, model, provider)
 			end
 		end
 		-- remove max_tokens, top_p, temperature for o1 models. https://platform.openai.com/docs/guides/reasoning/beta-limitations
-		output.max_tokens = nil
+		output.max_completion_tokens = nil
 		output.temperature = nil
 		output.top_p = nil
 	end
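
For context, a minimal sketch of the payload logic after this change. This is not the plugin's actual implementation; the prepare_payload_sketch name and the is_o1 flag are illustrative stand-ins for the dispatcher's own model detection.

-- Sketch only: default max_completion_tokens to 4096, then strip the
-- sampling/limit fields for o1 models, matching the diff above.
local function prepare_payload_sketch(messages, model, is_o1)
	local output = {
		model = model.model,
		stream = true,
		messages = messages,
		max_completion_tokens = model.max_completion_tokens or 4096,
		temperature = math.max(0, math.min(2, model.temperature or 1)),
		top_p = math.max(0, math.min(1, model.top_p or 1)),
	}
	if is_o1 then
		-- removed for o1 models, per the linked reasoning/beta-limitations doc
		output.max_completion_tokens = nil
		output.temperature = nil
		output.top_p = nil
	end
	return output
end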