Skip to content

Commit d1b4720

Browse files
authored
Merge branch 'main' into add-dapr-session
2 parents 57e0e63 + a30c32e commit d1b4720

File tree

3 files changed

+27
-13
lines changed

3 files changed

+27
-13
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
 [project]
 name = "openai-agents"
-version = "0.4.1"
+version = "0.4.2"
 description = "OpenAI Agents SDK"
 readme = "README.md"
 requires-python = ">=3.9"

src/agents/extensions/models/litellm_model.py

Lines changed: 25 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -110,18 +110,26 @@ async def get_response(
             prompt=prompt,
         )

-        assert isinstance(response.choices[0], litellm.types.utils.Choices)
+        message: litellm.types.utils.Message | None = None
+        first_choice: litellm.types.utils.Choices | None = None
+        if response.choices and len(response.choices) > 0:
+            choice = response.choices[0]
+            if isinstance(choice, litellm.types.utils.Choices):
+                first_choice = choice
+                message = first_choice.message

         if _debug.DONT_LOG_MODEL_DATA:
             logger.debug("Received model response")
         else:
-            logger.debug(
-                f"""LLM resp:\n{
-                    json.dumps(
-                        response.choices[0].message.model_dump(), indent=2, ensure_ascii=False
-                    )
-                }\n"""
-            )
+            if message is not None:
+                logger.debug(
+                    f"""LLM resp:\n{
+                        json.dumps(message.model_dump(), indent=2, ensure_ascii=False)
+                    }\n"""
+                )
+            else:
+                finish_reason = first_choice.finish_reason if first_choice else "-"
+                logger.debug(f"LLM resp had no message. finish_reason: {finish_reason}")

         if hasattr(response, "usage"):
             response_usage = response.usage
@@ -152,14 +160,20 @@ async def get_response(
             logger.warning("No usage information returned from Litellm")

         if tracing.include_data():
-            span_generation.span_data.output = [response.choices[0].message.model_dump()]
+            span_generation.span_data.output = (
+                [message.model_dump()] if message is not None else []
+            )
             span_generation.span_data.usage = {
                 "input_tokens": usage.input_tokens,
                 "output_tokens": usage.output_tokens,
             }

-        items = Converter.message_to_output_items(
-            LitellmConverter.convert_message_to_openai(response.choices[0].message)
+        items = (
+            Converter.message_to_output_items(
+                LitellmConverter.convert_message_to_openai(message)
+            )
+            if message is not None
+            else []
         )

         return ModelResponse(

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)