from pydantic import BaseModel, Field
from langchain_openai import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
+import logging
+import warnings
+from transformers import logging as transformers_logging
+
+# Configure logging
+logging.basicConfig(
+    level=logging.INFO,
+    format='%(asctime)s | %(name)s | %(levelname)s | %(message)s',
+    datefmt='%Y-%m-%d %H:%M:%S'
+)
+logger = logging.getLogger(__name__)
+
+# Suppress specific transformers warnings
+transformers_logging.set_verbosity_error()
+warnings.filterwarnings("ignore", message="Setting `pad_token_id` to `eos_token_id`")

class Agent(BaseModel):
    """Base agent class with common properties"""
@@ -10,6 +25,14 @@ class Agent(BaseModel):
    description: str
    llm: Any = Field(description="Language model for the agent")

+    def log_prompt(self, prompt: str, prefix: str = ""):
+        """Log a prompt being sent to the LLM"""
+        logger.info(f"\n{'=' * 80}\n{prefix} Prompt:\n{'-' * 40}\n{prompt}\n{'=' * 80}")
+
+    def log_response(self, response: str, prefix: str = ""):
+        """Log a response received from the LLM"""
+        logger.info(f"\n{'=' * 80}\n{prefix} Response:\n{'-' * 40}\n{response}\n{'=' * 80}")
+
class PlannerAgent(Agent):
    """Agent responsible for breaking down problems and planning steps"""
    def __init__(self, llm):
@@ -21,6 +44,8 @@ def __init__(self, llm):
        )

    def plan(self, query: str, context: List[Dict[str, Any]] = None) -> str:
+        logger.info(f"\n🎯 Planning step for query: {query}")
+
        if context:
            template = """You are a strategic planning agent. Your role is to break down complex problems into clear, manageable steps.

@@ -34,6 +59,7 @@ def plan(self, query: str, context: List[Dict[str, Any]] = None) -> str:

Plan:"""
            context_str = "\n\n".join([f"Context {i + 1}:\n{item['content']}" for i, item in enumerate(context)])
+            logger.info(f"Using context ({len(context)} items)")
        else:
            template = """You are a strategic planning agent. Your role is to break down complex problems into clear, manageable steps.

@@ -44,10 +70,15 @@ def plan(self, query: str, context: List[Dict[str, Any]] = None) -> str:

Plan:"""
            context_str = ""
+            logger.info("No context available")

        prompt = ChatPromptTemplate.from_template(template)
        messages = prompt.format_messages(query=query, context=context_str)
+        prompt_text = "\n".join([msg.content for msg in messages])
+        self.log_prompt(prompt_text, "Planner")
+
        response = self.llm.invoke(messages)
+        self.log_response(response.content, "Planner")
        return response.content

class ResearchAgent(Agent):
@@ -64,14 +95,18 @@ def __init__(self, llm, vector_store):
        )

    def research(self, query: str, step: str) -> List[Dict[str, Any]]:
+        logger.info(f"\n🔍 Researching for step: {step}")
+
        # Query all collections
        pdf_results = self.vector_store.query_pdf_collection(query)
        repo_results = self.vector_store.query_repo_collection(query)

        # Combine results
        all_results = pdf_results + repo_results
+        logger.info(f"Found {len(all_results)} relevant documents")

        if not all_results:
+            logger.warning("No relevant documents found")
            return []

        # Have LLM analyze and summarize findings
@@ -89,7 +124,11 @@ def research(self, query: str, step: str) -> List[Dict[str, Any]]:
        context_str = "\n\n".join([f"Source {i + 1}:\n{item['content']}" for i, item in enumerate(all_results)])
        prompt = ChatPromptTemplate.from_template(template)
        messages = prompt.format_messages(step=step, context=context_str)
+        prompt_text = "\n".join([msg.content for msg in messages])
+        self.log_prompt(prompt_text, "Researcher")
+
        response = self.llm.invoke(messages)
+        self.log_response(response.content, "Researcher")

        return [{"content": response.content, "metadata": {"source": "Research Summary"}}]

@@ -104,6 +143,8 @@ def __init__(self, llm):
        )

    def reason(self, query: str, step: str, context: List[Dict[str, Any]]) -> str:
+        logger.info(f"\n🤔 Reasoning about step: {step}")
+
        template = """You are a reasoning agent. Your role is to apply logical analysis to information and draw conclusions.

Given the following step, context, and query, apply logical reasoning to reach a conclusion.
@@ -121,7 +162,11 @@ def reason(self, query: str, step: str, context: List[Dict[str, Any]]) -> str:
        context_str = "\n\n".join([f"Context {i + 1}:\n{item['content']}" for i, item in enumerate(context)])
        prompt = ChatPromptTemplate.from_template(template)
        messages = prompt.format_messages(step=step, query=query, context=context_str)
+        prompt_text = "\n".join([msg.content for msg in messages])
+        self.log_prompt(prompt_text, "Reasoner")
+
        response = self.llm.invoke(messages)
+        self.log_response(response.content, "Reasoner")
        return response.content

class SynthesisAgent(Agent):
@@ -135,6 +180,8 @@ def __init__(self, llm):
        )

    def synthesize(self, query: str, reasoning_steps: List[str]) -> str:
+        logger.info(f"\n📝 Synthesizing final answer from {len(reasoning_steps)} reasoning steps")
+
        template = """You are a synthesis agent. Your role is to combine multiple pieces of information into a clear, coherent response.

Given the following query and reasoning steps, create a final comprehensive answer.
@@ -150,7 +197,11 @@ def synthesize(self, query: str, reasoning_steps: List[str]) -> str:
        steps_str = "\n\n".join([f"Step {i + 1}:\n{step}" for i, step in enumerate(reasoning_steps)])
        prompt = ChatPromptTemplate.from_template(template)
        messages = prompt.format_messages(query=query, steps=steps_str)
+        prompt_text = "\n".join([msg.content for msg in messages])
+        self.log_prompt(prompt_text, "Synthesizer")
+
        response = self.llm.invoke(messages)
+        self.log_response(response.content, "Synthesizer")
        return response.content

def create_agents(llm, vector_store=None):
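
A minimal sketch (not part of the commit) of how the logging added in this diff surfaces at runtime. The module path, model name, temperature, and the standalone PlannerAgent call are illustrative assumptions; the repository normally wires the agents together through create_agents() and a vector store.

from langchain_openai import ChatOpenAI
# assuming the agent classes above live in an importable module, e.g. agents.py
from agents import PlannerAgent  # hypothetical module path

llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)  # assumed model settings
planner = PlannerAgent(llm)

# plan() first emits an INFO record of the form
#   2025-01-01 12:00:00 | agents | INFO | 🎯 Planning step for query: ...
# followed by the framed "Planner Prompt:" and "Planner Response:" blocks
# produced by log_prompt() / log_response() in the Agent base class.
plan = planner.plan("How do the agents share retrieved context?")
print(plan)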