Skip to content
This repository was archived by the owner on Jul 22, 2025. It is now read-only.

Commit d97307e

Browse files
authored
FEATURE: optionally support OpenAI responses API (#1423)
OpenAI ships a new API for completions called the "Responses API". Certain models (o3-pro) require this API. Additionally, certain features are only made available via the new API. This allows enabling it per LLM. See: https://platform.openai.com/docs/api-reference/responses
1 parent 35d62a6 commit d97307e

File tree

7 files changed

+510
-34
lines changed

7 files changed

+510
-34
lines changed

app/models/llm_model.rb

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ def self.provider_params
5252
disable_temperature: :checkbox,
5353
disable_top_p: :checkbox,
5454
disable_streaming: :checkbox,
55+
enable_responses_api: :checkbox,
5556
reasoning_effort: {
5657
type: :enum,
5758
values: %w[default low medium high],

config/locales/client.en.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -579,6 +579,7 @@ en:
579579
reasoning_tokens: "Number of tokens used for reasoning"
580580
disable_temperature: "Disable temperature (some thinking models don't support temperature)"
581581
disable_top_p: "Disable top P (some thinking models don't support top P)"
582+
enable_responses_api: "Enable responses API (required on certain OpenAI models)"
582583

583584
related_topics:
584585
title: "Related topics"

lib/completions/dialects/chat_gpt.rb

Lines changed: 28 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,12 +20,19 @@ def native_tool_support?
2020
def embed_user_ids?
2121
return @embed_user_ids if defined?(@embed_user_ids)
2222

23-
@embed_user_ids =
23+
@embed_user_ids = true if responses_api?
24+
25+
@embed_user_ids ||=
2426
prompt.messages.any? do |m|
2527
m[:id] && m[:type] == :user && !m[:id].to_s.match?(VALID_ID_REGEX)
2628
end
2729
end
2830

31+
def responses_api?
32+
return @responses_api if defined?(@responses_api)
33+
@responses_api = llm_model.lookup_custom_param("enable_responses_api")
34+
end
35+
2936
def max_prompt_tokens
3037
# provide a buffer of 120 tokens - our function counting is not
3138
# 100% accurate and getting numbers to align exactly is very hard
@@ -51,7 +58,11 @@ def tools_dialect
5158
if disable_native_tools?
5259
super
5360
else
54-
@tools_dialect ||= DiscourseAi::Completions::Dialects::OpenAiTools.new(prompt.tools)
61+
@tools_dialect ||=
62+
DiscourseAi::Completions::Dialects::OpenAiTools.new(
63+
prompt.tools,
64+
responses_api: responses_api?,
65+
)
5566
end
5667
end
5768

@@ -120,7 +131,7 @@ def user_msg(msg)
120131
to_encoded_content_array(
121132
content: content_array.flatten,
122133
image_encoder: ->(details) { image_node(details) },
123-
text_encoder: ->(text) { { type: "text", text: text } },
134+
text_encoder: ->(text) { text_node(text) },
124135
allow_vision: vision_support?,
125136
)
126137

@@ -136,13 +147,21 @@ def no_array_if_only_text(content_array)
136147
end
137148
end
138149

150+
def text_node(text)
151+
if responses_api?
152+
{ type: "input_text", text: text }
153+
else
154+
{ type: "text", text: text }
155+
end
156+
end
157+
139158
def image_node(details)
140-
{
141-
type: "image_url",
142-
image_url: {
143-
url: "data:#{details[:mime_type]};base64,#{details[:base64]}",
144-
},
145-
}
159+
encoded_image = "data:#{details[:mime_type]};base64,#{details[:base64]}"
160+
if responses_api?
161+
{ type: "input_image", image_url: encoded_image }
162+
else
163+
{ type: "image_url", image_url: { url: encoded_image } }
164+
end
146165
end
147166

148167
def per_message_overhead

lib/completions/dialects/open_ai_tools.rb

Lines changed: 47 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -4,20 +4,32 @@ module DiscourseAi
44
module Completions
55
module Dialects
66
class OpenAiTools
7-
def initialize(tools)
7+
def initialize(tools, responses_api: false)
8+
@responses_api = responses_api
89
@raw_tools = tools
910
end
1011

1112
def translated_tools
12-
raw_tools.map do |tool|
13-
{
14-
type: "function",
15-
function: {
13+
if @responses_api
14+
raw_tools.map do |tool|
15+
{
16+
type: "function",
1617
name: tool.name,
1718
description: tool.description,
1819
parameters: tool.parameters_json_schema,
19-
},
20-
}
20+
}
21+
end
22+
else
23+
raw_tools.map do |tool|
24+
{
25+
type: "function",
26+
function: {
27+
name: tool.name,
28+
description: tool.description,
29+
parameters: tool.parameters_json_schema,
30+
},
31+
}
32+
end
2133
end
2234
end
2335

@@ -30,20 +42,37 @@ def from_raw_tool_call(raw_message)
3042
call_details[:arguments] = call_details[:arguments].to_json
3143
call_details[:name] = raw_message[:name]
3244

33-
{
34-
role: "assistant",
35-
content: nil,
36-
tool_calls: [{ type: "function", function: call_details, id: raw_message[:id] }],
37-
}
45+
if @responses_api
46+
{
47+
type: "function_call",
48+
call_id: raw_message[:id],
49+
name: call_details[:name],
50+
arguments: call_details[:arguments],
51+
}
52+
else
53+
{
54+
role: "assistant",
55+
content: nil,
56+
tool_calls: [{ type: "function", function: call_details, id: raw_message[:id] }],
57+
}
58+
end
3859
end
3960

4061
def from_raw_tool(raw_message)
41-
{
42-
role: "tool",
43-
tool_call_id: raw_message[:id],
44-
content: raw_message[:content],
45-
name: raw_message[:name],
46-
}
62+
if @responses_api
63+
{
64+
type: "function_call_output",
65+
call_id: raw_message[:id],
66+
output: raw_message[:content],
67+
}
68+
else
69+
{
70+
role: "tool",
71+
tool_call_id: raw_message[:id],
72+
content: raw_message[:content],
73+
name: raw_message[:name],
74+
}
75+
end
4776
end
4877

4978
private

lib/completions/endpoints/open_ai.rb

Lines changed: 33 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -89,26 +89,47 @@ def prepare_payload(prompt, model_params, dialect)
8989
# We'll fallback to guess this using the tokenizer.
9090
payload[:stream_options] = { include_usage: true } if llm_model.provider == "open_ai"
9191
end
92+
9293
if !xml_tools_enabled?
9394
if dialect.tools.present?
9495
payload[:tools] = dialect.tools
9596
if dialect.tool_choice.present?
9697
if dialect.tool_choice == :none
9798
payload[:tool_choice] = "none"
9899
else
99-
payload[:tool_choice] = {
100-
type: "function",
101-
function: {
102-
name: dialect.tool_choice,
103-
},
104-
}
100+
if responses_api?
101+
payload[:tool_choice] = { type: "function", name: dialect.tool_choice }
102+
else
103+
payload[:tool_choice] = {
104+
type: "function",
105+
function: {
106+
name: dialect.tool_choice,
107+
},
108+
}
109+
end
105110
end
106111
end
107112
end
108113
end
114+
115+
convert_payload_to_responses_api!(payload) if responses_api?
116+
109117
payload
110118
end
111119

120+
def responses_api?
121+
return @responses_api if defined?(@responses_api)
122+
@responses_api = llm_model.lookup_custom_param("enable_responses_api")
123+
end
124+
125+
def convert_payload_to_responses_api!(payload)
126+
payload[:input] = payload.delete(:messages)
127+
completion_tokens = payload.delete(:max_completion_tokens) || payload.delete(:max_tokens)
128+
payload[:max_output_tokens] = completion_tokens if completion_tokens
129+
# not supported in responses api
130+
payload.delete(:stream_options)
131+
end
132+
112133
def prepare_request(payload)
113134
headers = { "Content-Type" => "application/json" }
114135
api_key = llm_model.api_key
@@ -159,7 +180,12 @@ def xml_tools_enabled?
159180
private
160181

161182
def processor
162-
@processor ||= OpenAiMessageProcessor.new(partial_tool_calls: partial_tool_calls)
183+
@processor ||=
184+
if responses_api?
185+
OpenAiResponsesMessageProcessor.new(partial_tool_calls: partial_tool_calls)
186+
else
187+
OpenAiMessageProcessor.new(partial_tool_calls: partial_tool_calls)
188+
end
163189
end
164190
end
165191
end

0 commit comments

Comments
 (0)