@@ -113,19 +113,22 @@ def convert_system_messages(
113113 messages : list [BaseMessage ], rules : LLMRules
114114) -> list [BaseMessage ]:
115115 """
116- Converts system messages to human messages if the LLM doesnt support system messages.
116+ Converts system messages to human messages if the LLM doesn't support system messages, either at all or in the first position.
117117 """
118- if not messages or not rules .require_system_message_first :
119- return messages
120118
121119 new_messages = []
122- for message in messages :
120+ for i , message in enumerate ( messages ) :
123121 if isinstance (message , SystemMessage ):
124- new_messages .append (
125- HumanMessage (
126- content = f"ORCHESTRATOR: { message .content } " , name = message .name
122+ # if system messages are not supported OR if they must be first and this is not the first message
123+ # THEN convert the message to a human message
124+ if not rules .allow_system_messages or (
125+ i > 0 and rules .require_system_message_first
126+ ):
127+ new_messages .append (
128+ HumanMessage (
129+ content = f"ORCHESTRATOR: { message .content } " , name = message .name
130+ )
127131 )
128- )
129132 else :
130133 new_messages .append (message )
131134 return new_messages
@@ -249,7 +252,9 @@ def compile_to_messages(self, agent: "Agent") -> list[BaseMessage]:
249252 messages = break_up_consecutive_ai_messages (messages , rules = context .llm_rules )
250253 messages = format_message_name (messages , rules = context .llm_rules )
251254
255+ messages = system_prompt + messages
256+
252257 # this should go last
253258 messages = convert_system_messages (messages , rules = context .llm_rules )
254259
255- return system_prompt + messages
260+ return messages
0 commit comments