Skip to content

Commit 5b7b328

Browse files
authored
feat: Allow files to be included in the system prompt even when the model does not support them. (langgenius#11111)
1 parent 8d5a1be commit 5b7b328

File tree

3 files changed

+19
-9
lines changed
  • api/core

3 files changed

+19
-9
lines changed

api/core/model_runtime/model_providers/anthropic/llm/llm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -453,7 +453,7 @@ def _to_credential_kwargs(self, credentials: dict) -> dict:
453453

454454
return credentials_kwargs
455455

456-
def _convert_prompt_messages(self, prompt_messages: list[PromptMessage]) -> tuple[str, list[dict]]:
456+
def _convert_prompt_messages(self, prompt_messages: Sequence[PromptMessage]) -> tuple[str, list[dict]]:
457457
"""
458458
Convert prompt messages to dict list and system
459459
"""

api/core/model_runtime/model_providers/openai/llm/llm.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -943,6 +943,9 @@ def _convert_prompt_message_to_dict(self, message: PromptMessage) -> dict:
943943
}
944944
elif isinstance(message, SystemPromptMessage):
945945
message = cast(SystemPromptMessage, message)
946+
if isinstance(message.content, list):
947+
text_contents = filter(lambda c: isinstance(c, TextPromptMessageContent), message.content)
948+
message.content = "".join(c.data for c in text_contents)
946949
message_dict = {"role": "system", "content": message.content}
947950
elif isinstance(message, ToolPromptMessage):
948951
message = cast(ToolPromptMessage, message)

api/core/workflow/nodes/llm/node.py

Lines changed: 15 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage
2121
from core.model_runtime.entities.message_entities import (
2222
AssistantPromptMessage,
23+
PromptMessageContent,
2324
PromptMessageRole,
2425
SystemPromptMessage,
2526
UserPromptMessage,
@@ -828,14 +829,14 @@ def get_default_config(cls, filters: Optional[dict] = None) -> dict:
828829
}
829830

830831

831-
def _combine_text_message_with_role(*, text: str, role: PromptMessageRole):
832+
def _combine_message_content_with_role(*, contents: Sequence[PromptMessageContent], role: PromptMessageRole):
832833
match role:
833834
case PromptMessageRole.USER:
834-
return UserPromptMessage(content=[TextPromptMessageContent(data=text)])
835+
return UserPromptMessage(content=contents)
835836
case PromptMessageRole.ASSISTANT:
836-
return AssistantPromptMessage(content=[TextPromptMessageContent(data=text)])
837+
return AssistantPromptMessage(content=contents)
837838
case PromptMessageRole.SYSTEM:
838-
return SystemPromptMessage(content=[TextPromptMessageContent(data=text)])
839+
return SystemPromptMessage(content=contents)
839840
raise NotImplementedError(f"Role {role} is not supported")
840841

841842

@@ -877,7 +878,9 @@ def _handle_list_messages(
877878
jinjia2_variables=jinja2_variables,
878879
variable_pool=variable_pool,
879880
)
880-
prompt_message = _combine_text_message_with_role(text=result_text, role=message.role)
881+
prompt_message = _combine_message_content_with_role(
882+
contents=[TextPromptMessageContent(data=result_text)], role=message.role
883+
)
881884
prompt_messages.append(prompt_message)
882885
else:
883886
# Get segment group from basic message
@@ -908,12 +911,14 @@ def _handle_list_messages(
908911
# Create message with text from all segments
909912
plain_text = segment_group.text
910913
if plain_text:
911-
prompt_message = _combine_text_message_with_role(text=plain_text, role=message.role)
914+
prompt_message = _combine_message_content_with_role(
915+
contents=[TextPromptMessageContent(data=plain_text)], role=message.role
916+
)
912917
prompt_messages.append(prompt_message)
913918

914919
if file_contents:
915920
# Create message with image contents
916-
prompt_message = UserPromptMessage(content=file_contents)
921+
prompt_message = _combine_message_content_with_role(contents=file_contents, role=message.role)
917922
prompt_messages.append(prompt_message)
918923

919924
return prompt_messages
@@ -1018,6 +1023,8 @@ def _handle_completion_template(
10181023
else:
10191024
template_text = template.text
10201025
result_text = variable_pool.convert_template(template_text).text
1021-
prompt_message = _combine_text_message_with_role(text=result_text, role=PromptMessageRole.USER)
1026+
prompt_message = _combine_message_content_with_role(
1027+
contents=[TextPromptMessageContent(data=result_text)], role=PromptMessageRole.USER
1028+
)
10221029
prompt_messages.append(prompt_message)
10231030
return prompt_messages

0 commit comments

Comments
 (0)