1 file changed: +9 −1 lines changed
Changed file: src/mcp_agent/workflows/llm
```diff
@@ -486,8 +486,16 @@ async def generate_structured(
             "model": model,
             "messages": messages,
             "response_format": response_format,
-            "max_tokens": params.maxTokens,
         }
+
+        # Use max_completion_tokens for reasoning models, max_tokens for others
+        if self._reasoning(model):
+            # DEPRECATED: https://platform.openai.com/docs/api-reference/chat/create#chat-create-max_tokens
+            # "max_tokens": params.maxTokens,
+            payload["max_completion_tokens"] = params.maxTokens
+            payload["reasoning_effort"] = self._reasoning_effort
+        else:
+            payload["max_tokens"] = params.maxTokens
         user = params.user or getattr(self.context.config.openai, "user", None)
         if user:
             payload["user"] = user
```
You can’t perform that action at this time.
0 commit comments