Commit d2b9c54

fix: re-add openai response_format param, add test
1 parent a928cde commit d2b9c54

5 files changed: +403 -1 lines changed

lib/crewai/src/crewai/llms/providers/openai/completion.py

Lines changed: 15 additions & 1 deletion
@@ -17,6 +17,7 @@
 from crewai.llms.base_llm import BaseLLM
 from crewai.llms.hooks.transport import HTTPTransport
 from crewai.utilities.agent_utils import is_context_length_exceeded
+from crewai.utilities.converter import generate_model_description
 from crewai.utilities.exceptions.context_window_exceeding_exception import (
     LLMContextLengthExceededError,
 )
@@ -245,6 +246,16 @@ def _prepare_completion_params(
         if self.is_o1_model and self.reasoning_effort:
             params["reasoning_effort"] = self.reasoning_effort
 
+        if self.response_format is not None:
+            if isinstance(self.response_format, type) and issubclass(
+                self.response_format, BaseModel
+            ):
+                params["response_format"] = generate_model_description(
+                    self.response_format
+                )
+            elif isinstance(self.response_format, dict):
+                params["response_format"] = self.response_format
+
         if tools:
             params["tools"] = self._convert_tools_for_interference(tools)
             params["tool_choice"] = "auto"
@@ -303,8 +314,11 @@ def _handle_completion(
         """Handle non-streaming chat completion."""
         try:
             if response_model:
+                parse_params = {
+                    k: v for k, v in params.items() if k != "response_format"
+                }
                 parsed_response = self.client.beta.chat.completions.parse(
-                    **params,
+                    **parse_params,
                     response_format=response_model,
                 )
                 math_reasoning = parsed_response.choices[0].message
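
The restored logic gives `response_format` two paths in `_prepare_completion_params`: a Pydantic `BaseModel` subclass is converted through `generate_model_description`, while a plain dict is forwarded unchanged; and when an explicit `response_model` is supplied, `response_format` is dropped from the params before calling `client.beta.chat.completions.parse` so the two keyword arguments do not collide. Below is a minimal caller-side sketch of the two branches, matching the prompts recorded in the VCR cassettes added in this commit; the `crewai.LLM` entry point, its `call` signature, and the `Field` descriptions are assumptions for illustration rather than part of this diff.

from pydantic import BaseModel, Field

from crewai import LLM  # assumed public wrapper around the OpenAI provider


class AnswerResponse(BaseModel):
    """Response model with structured fields."""

    answer: str = Field(description="The answer to the question")
    confidence: float = Field(description="Confidence score between 0 and 1")


# Branch 1: a BaseModel subclass -> generate_model_description() builds the
# structured "response_format" payload sent with the request.
structured_llm = LLM(model="gpt-4o", response_format=AnswerResponse)
structured_llm.call("What is the capital of France? Be concise.")

# Branch 2: a plain dict is passed through to the API unchanged.
json_llm = LLM(model="gpt-4o", response_format={"type": "json_object"})
json_llm.call("Return a JSON object with a 'status' field set to 'success'")
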
Lines changed: 112 additions & 0 deletions
@@ -0,0 +1,112 @@
interactions:
- request:
    body: '{"messages":[{"role":"user","content":"Say hello in one word"}],"model":"gpt-4o"}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '81'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.109.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.109.1
      x-stainless-read-timeout:
      - '600'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.10
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAwAAAP//jJJNT9wwEIbv+RXunDdVNtoPuteiCoEQJ7SiFYqMPcm6OB7LnvAhtP8d
        OWE3oYDUiw9+5h2/73heMiHAaNgIUDvJqvU2/1nf1K44vVzePG6vr5cPV2V5/nt7eqG36lcLs6Sg
        u7+o+KD6rqj1FtmQG7AKKBlT1/l6tSjKYr1a96AljTbJGs/5gvKyKBd5cZIXqzfhjozCCBvxJxNC
        iJf+TBadxifYiGJ2uGkxRtkgbI5FQkAgm25AxmgiS8cwG6Eix+h612doLX2bwoB1F2Xy5jprJ0A6
        RyxTtt7W7RvZH41Yanygu/iPFGrjTNxVAWUklx6NTB56us+EuO0Dd+8ygA/Ueq6Y7rF/bl4O7WCc
        8AgPjImlnWgWs0+aVRpZGhsn8wIl1Q71qByHKzttaAKySeSPXj7rPcQ2rvmf9iNQCj2jrnxAbdT7
        vGNZwLR+X5UdR9wbhojhwSis2GBI36Cxlp0dNgPic2Rsq9q4BoMPZliP2lfqxwkWSyXna8j22SsA
        AAD//wMAmJrFFCcDAAA=
    headers:
      CF-RAY:
      - 9a3c18dff8580f53-EWR
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 24 Nov 2025 21:46:08 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - FILTERED
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - FILTERED
      openai-processing-ms:
      - '1096'
      openai-project:
      - FILTERED
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '1138'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-project-requests:
      - '10000'
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-project-requests:
      - '9999'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999992'
      x-ratelimit-reset-project-requests:
      - 6ms
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_670507131d6c455caf0e8cbc30a1a792
    status:
      code: 200
      message: OK
version: 1
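
This first cassette records a plain call with no response_format, which reads as a regression check that the re-added parameter stays optional. A matching test, under the same assumed pytest.mark.vcr / crewai.LLM setup as in the sketch above, could be as small as:

import pytest

from crewai import LLM  # assumed entry point, as above


@pytest.mark.vcr()  # assumed: replays the recorded interaction above
def test_openai_call_without_response_format():
    # No response_format: the recorded request body contains only
    # messages and model, matching the default behaviour.
    llm = LLM(model="gpt-4o")
    result = llm.call("Say hello in one word")
    assert isinstance(result, str) and result
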
Lines changed: 113 additions & 0 deletions
@@ -0,0 +1,113 @@
interactions:
- request:
    body: '{"messages":[{"role":"user","content":"Return a JSON object with a ''status''
      field set to ''success''"}],"model":"gpt-4o","response_format":{"type":"json_object"}}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '160'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.109.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.109.1
      x-stainless-read-timeout:
      - '600'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.10
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAA4xSwW6cMBC98xXWnJeKkC274Zr01vbUVopKhLxmACfGdj1D1Wi1/14ZNgtpU6kX
        hObNe7z3mGMiBOgGSgGql6wGb9Lb9r4191/46eaD+/j509f9Lvs2PP+47u/usIdNZLjDIyp+Yb1T
        bvAGWTs7wyqgZIyqV7tim+XZrng/AYNr0ERa5zndujTP8m2a7dOsOBN7pxUSlOJ7IoQQx+kZLdoG
        f0Ipss3LZEAi2SGUlyUhIDgTJyCJNLG0DJsFVM4y2sn1sbJCVEAseaQKyvg+KoVEFVT2tGYFbEeS
        0bQdjVkB0lrHMoae/D6ckdPFoXGdD+5Af1Ch1VZTXweU5Gx0Q+w8TOgpEeJhamJ8FQ58cIPnmt0T
        Tp/L81kOluoX8OaMsWNplvH11eYNsbpBltrQqkhQUvXYLMyldTk22q2AZBX5by9vac+xte3+R34B
        lELP2NQ+YKPV67zLWsB4l/9au1Q8GQbC8FMrrFljiL+hwVaOZj4ZoGdiHOpW2w6DD3q+m9bXO8TD
        tmizYg/JKfkNAAD//wMA0CE0wkADAAA=
    headers:
      CF-RAY:
      - 9a3c18d7de3c80dc-EWR
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 24 Nov 2025 21:46:06 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - FILTERED
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - FILTERED
      openai-processing-ms:
      - '424'
      openai-project:
      - FILTERED
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '443'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-project-requests:
      - '10000'
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-project-requests:
      - '9999'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999983'
      x-ratelimit-reset-project-requests:
      - 6ms
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_71bc4c9f29f843d6b3788b119850dfde
    status:
      code: 200
      message: OK
version: 1
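
This cassette carries "response_format":{"type":"json_object"} in the request body, i.e. the dict branch of the change. A hedged sketch of the test it pairs with, assuming the same VCR marker and LLM wrapper as above and that call() returns the raw JSON string:

import json

import pytest

from crewai import LLM  # assumed entry point, as above


@pytest.mark.vcr()  # assumed: replays the recorded interaction above
def test_openai_response_format_dict():
    # The dict branch forwards {"type": "json_object"} unchanged, which is
    # exactly the "response_format" seen in the recorded request body.
    llm = LLM(model="gpt-4o", response_format={"type": "json_object"})
    result = llm.call("Return a JSON object with a 'status' field set to 'success'")
    assert "status" in json.loads(result)
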
Lines changed: 116 additions & 0 deletions
@@ -0,0 +1,116 @@
interactions:
- request:
    body: '{"messages":[{"role":"user","content":"What is the capital of France? Be
      concise."}],"model":"gpt-4o","response_format":{"type":"json_schema","json_schema":{"name":"AnswerResponse","strict":true,"schema":{"description":"Response
      model with structured fields.","properties":{"answer":{"description":"The answer
      to the question","title":"Answer","type":"string"},"confidence":{"description":"Confidence
      score between 0 and 1","title":"Confidence","type":"number"}},"required":["answer","confidence"],"title":"AnswerResponse","type":"object","additionalProperties":false}}}}'
    headers:
      accept:
      - application/json
      accept-encoding:
      - gzip, deflate, zstd
      connection:
      - keep-alive
      content-length:
      - '571'
      content-type:
      - application/json
      host:
      - api.openai.com
      user-agent:
      - OpenAI/Python 1.109.1
      x-stainless-arch:
      - arm64
      x-stainless-async:
      - 'false'
      x-stainless-lang:
      - python
      x-stainless-os:
      - MacOS
      x-stainless-package-version:
      - 1.109.1
      x-stainless-read-timeout:
      - '600'
      x-stainless-retry-count:
      - '0'
      x-stainless-runtime:
      - CPython
      x-stainless-runtime-version:
      - 3.12.10
    method: POST
    uri: https://api.openai.com/v1/chat/completions
  response:
    body:
      string: !!binary |
        H4sIAAAAAAAAAwAAAP//jFLLbtswELzrK4g9SwFtyA/pmKCH9pRbUVSBwJArmTVFElyqbWr43wvK
        jqW0KdALDzs7w5ndPWWMgVZQM5AHEeXgTfHQfemO1f0H8cK73fDp82b/iz6uH4+WnC0hTwz3/A1l
        fGXdSTd4g1E7e4FlQBExqa5225Kv+W5bTsDgFJpE630sSles+bos+L7g2yvx4LREgpp9zRhj7DS9
        yaJV+BNqxvPXyoBEokeob02MQXAmVUAQaYrCRshnUDob0U6uTw0ISz8wNFA38CiCpgbyJrV0WqGV
        2EDN76rqvBQI2I0kkn87GrMAhLUuipR/sv50Rc43s8b1Prhn+oMKnbaaDm1AQc4mYxSdhwk9Z4w9
        TUMZ3+QEH9zgYxvdEafvqvIiB/MWZnC1uoLRRWEWdb7J35FrFUahDS2mClLIA6qZOq9AjEq7BZAt
        Qv/t5j3tS3Bt+/+RnwEp0UdUrQ+otHybeG4LmI70X223IU+GgTB81xLbqDGkRSjsxGgu9wP0QhGH
        ttO2x+CDvhxR51tZ7ZFvpFjtIDtnvwEAAP//AwAvoKedTQMAAA==
    headers:
      CF-RAY:
      - 9a3c18cf7fe04253-EWR
      Connection:
      - keep-alive
      Content-Encoding:
      - gzip
      Content-Type:
      - application/json
      Date:
      - Mon, 24 Nov 2025 21:46:05 GMT
      Server:
      - cloudflare
      Set-Cookie:
      - FILTERED
      Strict-Transport-Security:
      - max-age=31536000; includeSubDomains; preload
      Transfer-Encoding:
      - chunked
      X-Content-Type-Options:
      - nosniff
      access-control-expose-headers:
      - X-Request-ID
      alt-svc:
      - h3=":443"; ma=86400
      cf-cache-status:
      - DYNAMIC
      openai-organization:
      - FILTERED
      openai-processing-ms:
      - '448'
      openai-project:
      - FILTERED
      openai-version:
      - '2020-10-01'
      x-envoy-upstream-service-time:
      - '465'
      x-openai-proxy-wasm:
      - v0.1
      x-ratelimit-limit-project-requests:
      - '10000'
      x-ratelimit-limit-requests:
      - '10000'
      x-ratelimit-limit-tokens:
      - '30000000'
      x-ratelimit-remaining-project-requests:
      - '9999'
      x-ratelimit-remaining-requests:
      - '9999'
      x-ratelimit-remaining-tokens:
      - '29999987'
      x-ratelimit-reset-project-requests:
      - 6ms
      x-ratelimit-reset-requests:
      - 6ms
      x-ratelimit-reset-tokens:
      - 0s
      x-request-id:
      - req_765510cb1e614ed6a83e665bf7c5a07b
    status:
      code: 200
      message: OK
version: 1
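
The last cassette embeds a full json_schema payload generated from a Pydantic model, i.e. the BaseModel branch. A sketch under the same assumptions, with AnswerResponse reconstructed to mirror the schema in the recorded request and the assertion assuming call() returns the raw JSON string:

import json

import pytest
from pydantic import BaseModel, Field

from crewai import LLM  # assumed entry point, as above


class AnswerResponse(BaseModel):
    """Response model with structured fields."""

    answer: str = Field(description="The answer to the question")
    confidence: float = Field(description="Confidence score between 0 and 1")


@pytest.mark.vcr()  # assumed: replays the recorded interaction above
def test_openai_response_format_pydantic_model():
    # The BaseModel branch converts AnswerResponse into the json_schema
    # response_format visible in the recorded request.
    llm = LLM(model="gpt-4o", response_format=AnswerResponse)
    result = llm.call("What is the capital of France? Be concise.")
    parsed = json.loads(result)  # assumes the raw JSON string is returned
    assert {"answer", "confidence"} <= parsed.keys()
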
