diff --git a/backend/src/agent/graph.py b/backend/src/agent/graph.py
index 0f19c3f2..33f3f8c7 100644
--- a/backend/src/agent/graph.py
+++ b/backend/src/agent/graph.py
@@ -44,7 +44,7 @@
 def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerationState:
     """LangGraph node that generates search queries based on the User's question.
 
-    Uses Gemini 2.0 Flash to create an optimized search queries for web research based on
+    Uses Gemini 2.5 Flash to create optimized search queries for web research based on
     the User's question.
 
     Args:
@@ -60,7 +60,7 @@ def generate_query(state: OverallState, config: RunnableConfig) -> QueryGenerati
     if state.get("initial_search_query_count") is None:
         state["initial_search_query_count"] = configurable.number_of_initial_queries
 
-    # init Gemini 2.0 Flash
+    # init Gemini 2.5 Flash
     llm = ChatGoogleGenerativeAI(
         model=configurable.query_generator_model,
         temperature=1.0,
@@ -95,7 +95,7 @@ def continue_to_web_research(state: QueryGenerationState):
 def web_research(state: WebSearchState, config: RunnableConfig) -> OverallState:
     """LangGraph node that performs web research using the native Google Search API tool.
 
-    Executes a web search using the native Google Search API tool in combination with Gemini 2.0 Flash.
+    Executes a web search using the native Google Search API tool in combination with Gemini 2.5 Flash.
 
     Args:
         state: Current graph state containing the search query and research loop count
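
For readers following the second hunk, here is a minimal sketch of the pattern it documents: initializing Gemini 2.5 Flash through LangChain's `ChatGoogleGenerativeAI` and asking it for structured search queries. The literal model string `"gemini-2.5-flash"`, the `GEMINI_API_KEY` environment variable, the `max_retries`/`api_key` arguments, and the `SearchQueries` schema are illustrative assumptions, not taken from this diff; in the node itself the model name is read from `configurable.query_generator_model`, as the hunk shows.

```python
# Minimal sketch (not the repository's exact code) of initializing
# Gemini 2.5 Flash via LangChain and requesting structured search queries.
# Assumptions: query_generator_model resolves to "gemini-2.5-flash",
# GEMINI_API_KEY is set, and SearchQueries is a hypothetical stand-in
# for the graph's real output schema.
import os

from langchain_google_genai import ChatGoogleGenerativeAI
from pydantic import BaseModel, Field


class SearchQueries(BaseModel):
    """Hypothetical schema for the generated queries."""

    queries: list[str] = Field(description="Optimized search queries for web research.")


# Initialize the model; in graph.py the model name comes from
# configurable.query_generator_model, hard-coded here for the sketch.
llm = ChatGoogleGenerativeAI(
    model="gemini-2.5-flash",
    temperature=1.0,
    max_retries=2,
    api_key=os.getenv("GEMINI_API_KEY"),
)

# Bind the schema so the model returns a typed object instead of free text.
structured_llm = llm.with_structured_output(SearchQueries)

result = structured_llm.invoke(
    "Generate 3 optimized web search queries for: how does LangGraph checkpointing work?"
)
print(result.queries)
```

If the node wraps the model this way, downstream fan-out (e.g. `continue_to_web_research`) can iterate over a typed list of queries rather than parsing free text.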