35
35
)
36
36
from langchain_core .messages .ai import UsageMetadata
37
37
from langchain_core .messages .tool import ToolCall , ToolMessage
38
+ from langchain_core .messages .utils import convert_to_openai_messages
38
39
from langchain_core .outputs import ChatGeneration , ChatGenerationChunk , ChatResult
39
40
from langchain_core .runnables import Runnable , RunnableMap , RunnablePassthrough
40
41
from langchain_core .tools import BaseTool
@@ -285,6 +286,50 @@ def convert_messages_to_prompt_writer(messages: List[BaseMessage]) -> str:
285
286
)
286
287
287
288
289
def _convert_one_message_to_text_openai(message: BaseMessage) -> str:
    """Render a single message as a Harmony-format segment.

    Used when targeting OpenAI (gpt-oss) models on Bedrock, whose prompt
    format wraps each turn in Harmony special tokens.

    Args:
        message: The message to render. Its concrete class selects the
            Harmony role/channel used in the rendered segment.

    Returns:
        The message content wrapped in Harmony ``<|start|>``/``<|end|>``
        special tokens.

    Raises:
        ValueError: If ``message`` is not one of the supported message types.
    """
    if isinstance(message, SystemMessage):
        message_text = f"<|start|>system<|message|>{message.content}<|end|>"
    elif isinstance(message, ChatMessage):
        # Custom-role messages (e.g. "developer") keep their declared role.
        message_text = f"<|start|>{message.role}<|message|>{message.content}<|end|>"
    elif isinstance(message, HumanMessage):
        message_text = f"<|start|>user<|message|>{message.content}<|end|>"
    elif isinstance(message, AIMessage):
        # Assistant replies are emitted on the Harmony "final" channel.
        message_text = (
            f"<|start|>assistant<|channel|>final<|message|>{message.content}<|end|>"
        )
    elif isinstance(message, ToolMessage):
        # TODO: Tool messages in the Harmony format should use
        # "<|start|>{toolname} to=assistant<|message|>". The tool name is not
        # recoverable from the ToolMessage here yet, so a generic
        # "to=assistant" placeholder is used until full tool-calling support
        # lands in a follow-up PR.
        message_text = (
            f"<|start|>to=assistant<|channel|>commentary<|message|>"
            f"{message.content}<|end|>"
        )
    else:
        raise ValueError(f"Got unknown type {message}")

    return message_text
319
+
320
+
321
def convert_messages_to_prompt_openai(messages: List[BaseMessage]) -> str:
    """Convert a list of messages to a Harmony format prompt for the
    OpenAI Responses API.

    Each message is rendered by :func:`_convert_one_message_to_text_openai`
    and the segments are concatenated, followed by an opening assistant
    token that cues the model to produce its reply.

    Args:
        messages: The conversation history to render.

    Returns:
        The full Harmony-format prompt string, ending with
        ``"<|start|>assistant\\n\\n"``.
    """
    # str.join over a generator avoids the quadratic cost of repeated
    # string "+=" concatenation on long conversations.
    rendered = "".join(
        _convert_one_message_to_text_openai(message) for message in messages
    )
    return "\n" + rendered + "<|start|>assistant\n\n"
331
+
332
+
288
333
def _format_image (image_url : str ) -> Dict :
289
334
"""
290
335
Formats an image of format data:image/jpeg;base64,{b64_string}
@@ -640,6 +685,8 @@ def convert_messages_to_prompt(
640
685
)
641
686
elif provider == "writer" :
642
687
prompt = convert_messages_to_prompt_writer (messages = messages )
688
+ elif provider == "openai" :
689
+ prompt = convert_messages_to_prompt_openai (messages = messages )
643
690
else :
644
691
raise NotImplementedError (
645
692
f"Provider { provider } model does not support chat."
@@ -649,10 +696,11 @@ def convert_messages_to_prompt(
649
696
@classmethod
650
697
def format_messages (
651
698
cls , provider : str , messages : List [BaseMessage ]
652
- ) -> Tuple [Optional [str ], List [Dict ]]:
699
+ ) -> Union [ Tuple [Optional [str ], List [ Dict ] ], List [Dict ]]:
653
700
if provider == "anthropic" :
654
701
return _format_anthropic_messages (messages )
655
-
702
+ elif provider == "openai" :
703
+ return convert_to_openai_messages (messages )
656
704
raise NotImplementedError (
657
705
f"Provider { provider } not supported for format_messages"
658
706
)
@@ -777,6 +825,8 @@ def _stream(
777
825
system = self .system_prompt_with_tools + f"\n { system } "
778
826
else :
779
827
system = self .system_prompt_with_tools
828
+ elif provider == "openai" :
829
+ formatted_messages = ChatPromptAdapter .format_messages (provider , messages )
780
830
else :
781
831
prompt = ChatPromptAdapter .convert_messages_to_prompt (
782
832
provider = provider , messages = messages , model = self ._get_base_model ()
@@ -876,6 +926,8 @@ def _generate(
876
926
system = self .system_prompt_with_tools + f"\n { system } "
877
927
else :
878
928
system = self .system_prompt_with_tools
929
+ elif provider == "openai" :
930
+ formatted_messages = ChatPromptAdapter .format_messages (provider , messages )
879
931
else :
880
932
prompt = ChatPromptAdapter .convert_messages_to_prompt (
881
933
provider = provider , messages = messages , model = self ._get_base_model ()
0 commit comments