Skip to content

Commit b8ef736

Browse files
Testcases
1 parent e31db2d commit b8ef736

File tree

6 files changed

+565
-11
lines changed

6 files changed

+565
-11
lines changed

src/backend/agents/agentutils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
AzureOpenAIChatCompletionClient)
55
from pydantic import BaseModel
66

7-
from context.cosmos_memory import CosmosBufferedChatCompletionContext
7+
from src.backend.context.cosmos_memory import CosmosBufferedChatCompletionContext
88
from models.messages import InputTask, PlanStatus, Step, StepStatus
99

1010
common_agent_system_message = "If you do not have the information for the arguments of the function you need to call, do not call the function. Instead, respond back to the user requesting further information. You must not hallucinate or invent any of the information used as arguments in the function. For example, if you need to call a function that requires a delivery address, you must not generate 123 Example St. You must skip calling functions and return a clarification message along the lines of: Sorry, I'm missing some information I need to help you with that. Could you please provide the delivery address so I can do that for you?"

src/backend/agents/tech_support.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,8 @@
66
from autogen_core.components.tools import FunctionTool, Tool
77
from typing_extensions import Annotated
88

9-
from agents.base_agent import BaseAgent
10-
from context.cosmos_memory import CosmosBufferedChatCompletionContext
9+
from src.backend.agents.base_agent import BaseAgent
10+
from src.backend.context.cosmos_memory import CosmosBufferedChatCompletionContext
1111

1212
formatting_instructions = "Instructions: returning the output of this function call verbatim to the user in markdown. Then write AGENT SUMMARY: and then include a summary of what you did."
1313

src/backend/context/cosmos_memory.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -244,13 +244,13 @@ async def get_messages(self) -> List[LLMMessage]:
244244
content = item.get("content", {})
245245
message_type = content.get("type")
246246
if message_type == "SystemMessage":
247-
message = SystemMessage(**content)
247+
message = SystemMessage.model_validate(content)
248248
elif message_type == "UserMessage":
249-
message = UserMessage(**content)
249+
message = UserMessage.model_validate(content)
250250
elif message_type == "AssistantMessage":
251-
message = AssistantMessage(**content)
251+
message = AssistantMessage.model_validate(content)
252252
elif message_type == "FunctionExecutionResultMessage":
253-
message = FunctionExecutionResultMessage(**content)
253+
message = FunctionExecutionResultMessage.model_validate(content)
254254
else:
255255
continue
256256
messages.append(message)

src/backend/models/messages.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -265,13 +265,13 @@ def from_dict(data: dict) -> "GroupChatMessage":
265265
body_type = body_data.pop("type")
266266

267267
if body_type == "SystemMessage":
268-
body = SystemMessage(**body_data)
268+
body = SystemMessage.from_dict(body_data)
269269
elif body_type == "UserMessage":
270-
body = UserMessage(**body_data)
270+
body = UserMessage.from_dict(body_data)
271271
elif body_type == "AssistantMessage":
272-
body = AssistantMessage(**body_data)
272+
body = AssistantMessage.from_dict(body_data)
273273
elif body_type == "FunctionExecutionResultMessage":
274-
body = FunctionExecutionResultMessage(**body_data)
274+
body = FunctionExecutionResultMessage.from_dict(body_data)
275275
else:
276276
raise ValueError(f"Unknown message type: {body_type}")
277277

0 commit comments

Comments (0)