2 changes: 1 addition & 1 deletion backend/requirements.txt
@@ -16,7 +16,7 @@ sphinxcontrib-mermaid==0.9.2
 sphinx-book-theme==1.1.3
 sphinx-copybutton==0.5.2
 sphinx-external-toc==1.0.1
-pypdf==4.2.0
+pypdf==5.6.0
 unstructured==0.15.1
 nltk==3.9.1
 huggingface_hub[cli]==0.24.6
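
The pypdf bump from 4.2.0 to 5.6.0 crosses a major version. A minimal smoke test such as the sketch below (not part of the PR) can confirm that text extraction still behaves as expected under 5.x, assuming the backend uses pypdf directly or via unstructured for PDF ingestion; "sample.pdf" is a placeholder path, not a file from this repository.

# Sketch of a smoke test for the pypdf 5.x upgrade; "sample.pdf" is a placeholder.
from pypdf import PdfReader

reader = PdfReader("sample.pdf")
text = "\n".join(page.extract_text() or "" for page in reader.pages)
print(f"{len(reader.pages)} pages, {len(text)} characters extracted")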
5 changes: 1 addition & 4 deletions backend/src/agents/retriever_graph.py
@@ -133,7 +133,7 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
 
         tool_choice_chain = (
            ChatPromptTemplate.from_template(rephrase_prompt_template)
-            | self.llm
+            | model
            | JsonOutputParser()
        )
        response = tool_choice_chain.invoke(
@@ -143,8 +143,6 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
            }
        )
 
-        response = model.invoke(followup_question)
-
        if response is None or response.tool_calls is None:
            return {"tools": []}
 
@@ -189,7 +187,6 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
    def generate(self, state: AgentState) -> dict[str, Any]:
        query = state["messages"][-1].content
        context = state["context"][-1].content
-        print("state keys", state.keys())
 
        ans = self.llm_chain.invoke({"context": context, "question": query})
 
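
This change routes the rephrase/tool-choice prompt through `model` (presumably the tool-bound LLM) instead of the bare `self.llm`, and drops the redundant second `model.invoke(followup_question)` call so the chain is invoked exactly once and its result drives the `tool_calls` check. A rough sketch of that pattern follows, assuming LangChain's `bind_tools` API; `llm`, `tools`, and the prompt string are placeholders rather than the project's actual objects, and the sketch reads `tool_calls` straight off the returned message rather than piping through an output parser.

# Rough sketch (not the project's code) of driving tool selection with a
# tool-bound chat model in a single chain; `llm` and `tools` are placeholders.
from langchain_core.prompts import ChatPromptTemplate

def build_tool_choice_chain(llm, tools, rephrase_prompt_template: str):
    model = llm.bind_tools(tools)  # the bound model can emit tool_calls
    return ChatPromptTemplate.from_template(rephrase_prompt_template) | model

# response = build_tool_choice_chain(llm, tools, template).invoke(
#     {"chat_history": history, "question": followup_question}
# )
# for call in (response.tool_calls or []):  # tools the model decided to call
#     print(call["name"], call["args"])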
3 changes: 1 addition & 2 deletions backend/src/prompts/prompt_templates.py
@@ -75,8 +75,7 @@
 Given the chat history, rephrase the follow-up question to be a standalone question.\
 The rephrased question should include only relevant information inferred from the chat history.\
 If the question is already standalone, return the same question.\
-Choose the most appropriate tools from the list of tools to answer the rephrased question.\
-Return your response as a json blob with 'rephrased_question'.\
+Return your response as a JSON blob with 'rephrased_question'.
 
 
 """
2 changes: 1 addition & 1 deletion backend/src/vectorstores/faiss.py
@@ -31,7 +31,7 @@ def __init__(
    ):
        self.embeddings_model_name = embeddings_model_name
 
-        model_kwargs = {"device": "cuda"} if use_cuda else {}
+        model_kwargs = {"device": "cuda"} if use_cuda else {"device": "cpu"}
 
        self.embedding_model: Union[
            HuggingFaceEmbeddings, GoogleGenerativeAIEmbeddings, VertexAIEmbeddings
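
Pinning the fallback to {"device": "cpu"} makes the non-CUDA path explicit instead of leaving device selection to the sentence-transformers default, which may pick a GPU or MPS device if one is visible. A minimal sketch of how model_kwargs reaches the embedding model, assuming the langchain-huggingface wrapper (the project may import HuggingFaceEmbeddings from a different LangChain package); use_cuda and the model name are placeholders.

# Sketch only; the model name and use_cuda flag are placeholders, not the project's config.
from langchain_huggingface import HuggingFaceEmbeddings

use_cuda = False
model_kwargs = {"device": "cuda"} if use_cuda else {"device": "cpu"}

embedding_model = HuggingFaceEmbeddings(
    model_name="sentence-transformers/all-MiniLM-L6-v2",
    model_kwargs=model_kwargs,  # forwarded to SentenceTransformer(..., device=...)
)
print(len(embedding_model.embed_query("hello world")))  # embedding dimensionality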