
Commit d91ffe5

fix: resolve test failures in autoagents.py
- Use client.parse_structured_output instead of client.beta.chat.completions.parse
- Change llm_instance.response to llm_instance.get_response for structured output support
- Fixes AttributeError: 'OpenAIClient' object has no attribute 'beta'
- Fixes TypeError: Object of type ModelMetaclass is not JSON serializable

Co-authored-by: Mervin Praison <[email protected]>
1 parent 495f7e9 commit d91ffe5

1 file changed: +5 -6 lines changed

src/praisonai-agents/praisonaiagents/agents/autoagents.py

Lines changed: 5 additions & 6 deletions
@@ -338,15 +338,14 @@ def _generate_config(self) -> AutoAgentsConfig:
         try:
             if use_openai_structured and client:
                 # Use OpenAI's structured output for OpenAI models (backward compatibility)
-                response = client.beta.chat.completions.parse(
-                    model=self.llm,
-                    response_format=AutoAgentsConfig,
+                config = client.parse_structured_output(
                     messages=[
                         {"role": "system", "content": "You are a helpful assistant designed to generate AI agent configurations."},
                         {"role": "user", "content": prompt}
-                    ]
+                    ],
+                    response_format=AutoAgentsConfig,
+                    model=self.llm
                 )
-                config = response.choices[0].message.parsed
                 # Store the response for potential retry
                 last_response = json.dumps(config.model_dump(), indent=2)
             else:
@@ -357,7 +356,7 @@ def _generate_config(self) -> AutoAgentsConfig:
                     api_key=self.api_key
                 )
 
-                response_text = llm_instance.response(
+                response_text = llm_instance.get_response(
                     prompt=prompt,
                     system_prompt="You are a helpful assistant designed to generate AI agent configurations.",
                     output_pydantic=AutoAgentsConfig,

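For reference, a minimal sketch of the corrected call pattern this commit introduces. The keyword arguments (messages, response_format, model, prompt, system_prompt, output_pydantic) mirror the diff above; the wrapper function, the client/llm_instance objects, and the AutoAgentsConfig field shown here are illustrative assumptions, not the actual praisonaiagents implementation.

# Sketch only: object construction and the AutoAgentsConfig field are assumed
# for illustration; the call signatures follow the diff in this commit.
import json
from pydantic import BaseModel

class AutoAgentsConfig(BaseModel):
    # Illustrative field; the real model defines the full agent configuration.
    main_instruction: str

def generate_config(client, llm_instance, llm_model, prompt, use_openai_structured):
    if use_openai_structured and client:
        # parse_structured_output returns the parsed Pydantic object directly,
        # so there is no response.choices[0].message.parsed to unwrap.
        config = client.parse_structured_output(
            messages=[
                {"role": "system", "content": "You are a helpful assistant designed to generate AI agent configurations."},
                {"role": "user", "content": prompt},
            ],
            response_format=AutoAgentsConfig,
            model=llm_model,
        )
        # model_dump() yields a plain dict, so json.dumps can serialize it
        # for the retry payload.
        last_response = json.dumps(config.model_dump(), indent=2)
        return config, last_response
    # Fallback path: the LLM wrapper exposes get_response, not response.
    response_text = llm_instance.get_response(
        prompt=prompt,
        system_prompt="You are a helpful assistant designed to generate AI agent configurations.",
        output_pydantic=AutoAgentsConfig,
    )
    return response_text, None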