Skip to content

Commit a8077e9

Browse files
Merge pull request #817 from MervinPraison/claude/issue-815-20250711_070741
fix: resolve SyntaxError in autoagents.py
2 parents 09092dc + d91ffe5 commit a8077e9

File tree

1 file changed

+6
-33
lines changed

1 file changed

+6
-33
lines changed

src/praisonai-agents/praisonaiagents/agents/autoagents.py

Lines changed: 6 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -296,6 +296,10 @@ def _generate_config(self) -> AutoAgentsConfig:
296296
last_error = None
297297

298298
for attempt in range(max_retries):
299+
# Initialize variables for this attempt
300+
use_openai_structured = False
301+
client = None
302+
299303
# Prepare prompt for this attempt
300304
if attempt > 0 and last_response and last_error:
301305
# On retry, include the previous response and error
@@ -331,38 +335,7 @@ def _generate_config(self) -> AutoAgentsConfig:
331335
# If OpenAI client is not available, we'll use the LLM class
332336
pass
333337

334-
if use_openai_structured and client:
335-
# Use OpenAI's structured output for OpenAI models (backward compatibility)
336-
response = client.beta.chat.completions.parse(
337-
model=self.llm,
338-
response_format=AutoAgentsConfig,
339-
messages=[
340-
{"role": "system", "content": "You are a helpful assistant designed to generate AI agent configurations."},
341-
{"role": "user", "content": prompt}
342-
]
343-
)
344-
config = response.choices[0].message.parsed
345-
else:
346-
# Use LLM class for all other providers (Gemini, Anthropic, etc.)
347-
llm_instance = LLM(
348-
model=self.llm,
349-
base_url=self.base_url,
350-
api_key=self.api_key
351-
)
352-
353-
try:
354-
# Check if we have OpenAI API and the model supports structured output
355-
if self.llm and (self.llm.startswith('gpt-') or self.llm.startswith('o1-') or self.llm.startswith('o3-')):
356-
# Create a new client instance if custom parameters are provided
357-
if self.api_key or self.base_url:
358-
client = OpenAIClient(api_key=self.api_key, base_url=self.base_url)
359-
else:
360-
client = get_openai_client()
361-
use_openai_structured = True
362-
except:
363-
# If OpenAI client is not available, we'll use the LLM class
364-
pass
365-
338+
try:
366339
if use_openai_structured and client:
367340
# Use OpenAI's structured output for OpenAI models (backward compatibility)
368341
config = client.parse_structured_output(
@@ -383,7 +356,7 @@ def _generate_config(self) -> AutoAgentsConfig:
383356
api_key=self.api_key
384357
)
385358

386-
response_text = llm_instance.response(
359+
response_text = llm_instance.get_response(
387360
prompt=prompt,
388361
system_prompt="You are a helpful assistant designed to generate AI agent configurations.",
389362
output_pydantic=AutoAgentsConfig,

0 commit comments

Comments (0)