33 changes: 17 additions & 16 deletions src/backend/app_kernel.py
@@ -35,20 +35,20 @@
ActionRequest,
ActionResponse,
)
from utils_kernel import initialize_runtime_and_context, get_agents, retrieve_all_agent_tools, rai_success
from utils_kernel import initialize_runtime_and_context, get_agents, rai_success
from event_utils import track_event_if_configured
from models.agent_types import AgentType
from kernel_agents.agent_factory import AgentFactory

# Check if the Application Insights Instrumentation Key is set in the environment variables
instrumentation_key = os.getenv("APPLICATIONINSIGHTS_INSTRUMENTATION_KEY")
if instrumentation_key:
# Configure Application Insights if the Instrumentation Key is found
configure_azure_monitor(connection_string=instrumentation_key)
logging.info("Application Insights configured with the provided Instrumentation Key")
else:
# Log a warning if the Instrumentation Key is not found
logging.warning("No Application Insights Instrumentation Key found. Skipping configuration")
# # Check if the Application Insights Instrumentation Key is set in the environment variables
# instrumentation_key = os.getenv("APPLICATIONINSIGHTS_INSTRUMENTATION_KEY")
# if instrumentation_key:
# # Configure Application Insights if the Instrumentation Key is found
# configure_azure_monitor(connection_string=instrumentation_key)
# logging.info("Application Insights configured with the provided Instrumentation Key")
# else:
# # Log a warning if the Instrumentation Key is not found
# logging.warning("No Application Insights Instrumentation Key found. Skipping configuration")

# Configure logging
logging.basicConfig(level=logging.INFO)
@@ -59,10 +59,10 @@
)
logging.getLogger("azure.identity.aio._internal").setLevel(logging.WARNING)

# Suppress info logs from OpenTelemetry exporter
logging.getLogger("azure.monitor.opentelemetry.exporter.export._base").setLevel(
logging.WARNING
)
# # Suppress info logs from OpenTelemetry exporter
# logging.getLogger("azure.monitor.opentelemetry.exporter.export._base").setLevel(
# logging.WARNING
# )

# Initialize the FastAPI app
app = FastAPI()
@@ -132,9 +132,10 @@ async def input_task_endpoint(input_task: InputTask, request: Request):
input_task_data["user_id"] = user_id
input_task_json = json.dumps(input_task_data)

logging.info(f"Input task: {input_task}")
# Use the planner to handle the task
result = await planner_agent.handle_input_task(
KernelArguments(input_task_json=input_task_json)
input_task
)

print(f"Result: {result}")
@@ -819,7 +820,7 @@ async def get_agent_tools():
type: string
description: Arguments required by the tool function
"""
return retrieve_all_agent_tools()
return []


# Initialize the application when it starts
31 changes: 7 additions & 24 deletions src/backend/config_kernel.py
@@ -3,7 +3,13 @@
import logging
import semantic_kernel as sk
from semantic_kernel.kernel import Kernel
from semantic_kernel.contents import ChatHistory
# Updated imports for compatibility
try:
# Try newer structure
from semantic_kernel.contents import ChatHistory
except ImportError:
# Fall back to older structure for compatibility
from semantic_kernel.connectors.ai.chat_completion_client import ChatHistory
from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent

# Import AppConfig from app_config
@@ -54,26 +60,3 @@ def CreateKernel():
def GetAIProjectClient():
"""Get an AIProjectClient using the AppConfig implementation."""
return config.get_ai_project_client()

@staticmethod
async def CreateAzureAIAgent(
kernel: Kernel,
agent_name: str,
instructions: str,
agent_type: str = "assistant",
tools=None,
tool_resources=None,
response_format=None,
temperature: float = 0.0
):
"""Creates a new Azure AI Agent using the AppConfig implementation."""
return await config.create_azure_ai_agent(
kernel=kernel,
agent_name=agent_name,
instructions=instructions,
agent_type=agent_type,
tools=tools,
tool_resources=tool_resources,
response_format=response_format,
temperature=temperature
)
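
For reference, a minimal usage sketch of the ChatHistory import handled by the try/except above. This is illustrative only, not part of this PR, and the messages are invented:

from semantic_kernel.contents import ChatHistory  # assumes the newer import path resolves

history = ChatHistory()
history.add_user_message("Draft an onboarding plan for a new hire.")  # hypothetical user turn
history.add_assistant_message("Here is a first draft of the plan...")  # hypothetical reply
print(len(history.messages))  # ChatHistory exposes the accumulated messages
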
105 changes: 105 additions & 0 deletions src/backend/kernel_agents/agent_base.py
@@ -9,6 +9,24 @@
from semantic_kernel.functions.kernel_arguments import KernelArguments
from semantic_kernel.agents.azure_ai.azure_ai_agent import AzureAIAgent

# Updated imports for compatibility
try:
# Try importing from newer structure first
from semantic_kernel.contents import ChatMessageContent, ChatHistory
except ImportError:
# Fall back to older structure for compatibility
class ChatMessageContent:
"""Compatibility class for older SK versions."""
def __init__(self, role="", content="", name=None):
self.role = role
self.content = content
self.name = name

class ChatHistory:
"""Compatibility class for older SK versions."""
def __init__(self):
self.messages = []

from context.cosmos_memory_kernel import CosmosMemoryContext
from models.messages_kernel import (
ActionRequest,
@@ -64,6 +82,7 @@ def __init__(
else:
tools = tools or []
system_message = system_message or self._default_system_message(agent_name)

# Call AzureAIAgent constructor with required client and definition
super().__init__(
kernel=kernel,
@@ -76,6 +95,8 @@
client=client,
definition=definition
)

# Store instance variables
self._agent_name = agent_name
self._kernel = kernel
self._session_id = session_id
@@ -84,8 +105,14 @@
self._tools = tools
self._system_message = system_message
self._chat_history = [{"role": "system", "content": self._system_message}]
self._agent = None # Will be initialized in async_init

# Required properties for AgentGroupChat compatibility
self.name = agent_name # This is crucial for AgentGroupChat to identify agents

# Log initialization
logging.info(f"Initialized {agent_name} with {len(self._tools)} tools")

# Register the handler functions
self._register_functions()

@@ -107,6 +134,53 @@ async def async_init(self):
# Tools are registered with the kernel via get_tools_from_config
return self

async def invoke_async(self, *args, **kwargs):
"""Invoke this agent asynchronously.

This method is required for compatibility with AgentGroupChat.

Args:
*args: Positional arguments
**kwargs: Keyword arguments

Returns:
The agent's response
"""
# Ensure agent is initialized
if self._agent is None:
await self.async_init()

# Get the text input from args or kwargs
text = None
if args and isinstance(args[0], str):
text = args[0]
elif "text" in kwargs:
text = kwargs["text"]
elif "arguments" in kwargs and hasattr(kwargs["arguments"], "get"):
text = kwargs["arguments"].get("text") or kwargs["arguments"].get("input")

if not text:
settings = kwargs.get("settings", {})
if isinstance(settings, dict) and "input" in settings:
text = settings["input"]

# If text is still not found, create a default message
if not text:
text = "Hello, please assist with a task."

# Use the text to invoke the agent
try:
logging.info(f"Invoking {self._agent_name} with text: {text[:100]}...")
response = await self._agent.invoke(
self._kernel,
text,
settings=kwargs.get("settings", {})
)
return response
except Exception as e:
logging.error(f"Error invoking {self._agent_name}: {e}")
return f"Error: {str(e)}"

def _register_functions(self):
"""Register this agent's functions with the kernel."""
# Use the kernel function decorator approach instead of from_native_method
@@ -126,6 +200,37 @@ async def handle_action_request_wrapper(*args, **kwargs):
kernel_func = KernelFunction.from_method(handle_action_request_wrapper)
# Use agent name as plugin for handler
self._kernel.add_function(self._agent_name, kernel_func)

# Required method for AgentGroupChat compatibility
async def send_message_async(self, message_content: ChatMessageContent, chat_history: ChatHistory):
"""Send a message to the agent asynchronously, adding it to chat history.

Args:
message_content: The content of the message
chat_history: The chat history

Returns:
None
"""
# Convert message to format expected by the agent
if hasattr(message_content, "role") and hasattr(message_content, "content"):
self._chat_history.append({
"role": message_content.role,
"content": message_content.content
})

# If chat history is provided, update our internal history
if chat_history and hasattr(chat_history, "messages"):
# Update with the latest messages from chat history
for msg in chat_history.messages[-5:]: # Only use last 5 messages to avoid history getting too long
if msg not in self._chat_history:
self._chat_history.append({
"role": msg.role,
"content": msg.content
})

# No need to return anything as we're just updating state
return None

async def handle_action_request(self, action_request_json: str) -> str:
"""Handle an action request from another agent or the system.
73 changes: 17 additions & 56 deletions src/backend/kernel_agents/agent_factory.py
@@ -23,10 +23,14 @@
from kernel_agents.product_agent import ProductAgent
from kernel_agents.planner_agent import PlannerAgent # Add PlannerAgent import
from kernel_agents.group_chat_manager import GroupChatManager

from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig
from context.cosmos_memory_kernel import CosmosMemoryContext
from models.messages_kernel import PlannerResponsePlan


from azure.ai.projects.models import (
ResponseFormatJsonSchema,
ResponseFormatJsonSchemaType,
)
logger = logging.getLogger(__name__)


@@ -108,6 +112,7 @@ async def create_agent(
user_id: str,
temperature: float = 0.0,
system_message: Optional[str] = None,
response_format: Optional[Any] = None,
**kwargs
) -> BaseAgent:
"""Create an agent of the specified type.
@@ -174,7 +179,7 @@ async def create_agent(
name=agent_type_str,
instructions=system_message,
temperature=temperature,
response_format=None # Add response_format if required
response_format=response_format # Add response_format if required
)
logger.info(f"Successfully created agent definition for {agent_type_str}")
except Exception as agent_exc:
@@ -224,57 +229,6 @@ async def create_agent(

return agent

@classmethod
async def create_azure_ai_agent(
cls,
agent_name: str,
session_id: str,
system_prompt: str,
tools: List[KernelFunction] = None
) -> AzureAIAgent:
"""Create an Azure AI Agent.

Args:
agent_name: The name of the agent
session_id: The session ID
system_prompt: The system prompt for the agent
tools: Optional list of tools for the agent

Returns:
An Azure AI Agent instance
"""
# Check if we already have an agent in the cache
cache_key = f"{session_id}_{agent_name}"
if session_id in cls._azure_ai_agent_cache and cache_key in cls._azure_ai_agent_cache[session_id]:
# If tools are provided, make sure they are registered with the cached agent
agent = cls._azure_ai_agent_cache[session_id][cache_key]
if tools:
for tool in tools:
agent.add_function(tool)
return agent

# Create a kernel using the AppConfig instance
kernel = config.create_kernel()

# Await creation since create_azure_ai_agent is async
agent = await config.create_azure_ai_agent(
kernel=kernel,
agent_name=agent_name,
instructions=system_prompt
)

# Register tools if provided
if tools:
for tool in tools:
agent.add_function(tool)

# Cache the agent instance
if session_id not in cls._azure_ai_agent_cache:
cls._azure_ai_agent_cache[session_id] = {}
cls._azure_ai_agent_cache[session_id][cache_key] = agent

return agent

@classmethod
async def _load_tools_for_agent(cls, kernel: Kernel, agent_type: str) -> List[KernelFunction]:
"""Load tools for an agent from the tools directory.
@@ -310,7 +264,7 @@ async def _load_tools_for_agent(cls, kernel: Kernel, agent_type: str) -> List[KernelFunction]:
# For other agent types, try to create a simple fallback tool
try:
# Use PromptTemplateConfig to create a simple tool
from semantic_kernel.prompt_template.prompt_template_config import PromptTemplateConfig


# Simple minimal prompt
prompt = f"""You are a helpful assistant specialized in {agent_type} tasks.
@@ -398,7 +352,14 @@ async def create_all_agents(
session_id=session_id,
user_id=user_id,
temperature=temperature,
agent_instances=agent_instances # Pass agent instances to the planner
agent_instances=agent_instances, # Pass agent instances to the planner
response_format=ResponseFormatJsonSchemaType(
json_schema=ResponseFormatJsonSchema(
name=PlannerResponsePlan.__name__,
description=f"respond with {PlannerResponsePlan.__name__.lower()}",
schema=PlannerResponsePlan.model_json_schema(),
)
)
)
agents[planner_agent_type] = planner_agent

Expand Down
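As a closing reference, the response_format wiring shown above follows this general pattern. This is a hedged sketch in which ExamplePlan is a hypothetical Pydantic model standing in for PlannerResponsePlan:

from pydantic import BaseModel
from azure.ai.projects.models import ResponseFormatJsonSchema, ResponseFormatJsonSchemaType

class ExamplePlan(BaseModel):  # hypothetical stand-in for PlannerResponsePlan
    summary: str
    steps: list[str]

response_format = ResponseFormatJsonSchemaType(
    json_schema=ResponseFormatJsonSchema(
        name=ExamplePlan.__name__,
        description=f"respond with {ExamplePlan.__name__.lower()}",
        schema=ExamplePlan.model_json_schema(),  # Pydantic v2 JSON schema
    )
)
# The resulting object would then be passed as the response_format argument when
# creating the planner agent, as in create_all_agents above.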