Skip to content

Commit af5eb30

Browse files
Fix support for models that do support inbuilt function calling. (#151)
* Fix: Ensure CPU is set as default device when CUDA is not used
* fix: remove redundant second LLM call
* chore: update pypdf
* fix: remove debug print statement from generate method
* fix: remove type ignore
* fix: bind tools to LLM model in agent method
* fix: formatting fixes

---------

Signed-off-by: Palaniappan R <[email protected]>
1 parent 2eb108d commit af5eb30

File tree

4 files changed

+4
-8
lines changed

4 files changed

+4
-8
lines changed

backend/requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -16,7 +16,7 @@ sphinxcontrib-mermaid==0.9.2
1616
sphinx-book-theme==1.1.3
1717
sphinx-copybutton==0.5.2
1818
sphinx-external-toc==1.0.1
19-
pypdf==4.2.0
19+
pypdf==5.6.0
2020
unstructured==0.15.1
2121
nltk==3.9.1
2222
huggingface_hub[cli]==0.24.6

backend/src/agents/retriever_graph.py

Lines changed: 1 addition & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -133,7 +133,7 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
133133

134134
tool_choice_chain = (
135135
ChatPromptTemplate.from_template(rephrase_prompt_template)
136-
| self.llm
136+
| model
137137
| JsonOutputParser()
138138
)
139139
response = tool_choice_chain.invoke(
@@ -143,8 +143,6 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
143143
}
144144
)
145145

146-
response = model.invoke(followup_question)
147-
148146
if response is None or response.tool_calls is None:
149147
return {"tools": []}
150148

@@ -189,7 +187,6 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
189187
def generate(self, state: AgentState) -> dict[str, Any]:
190188
query = state["messages"][-1].content
191189
context = state["context"][-1].content
192-
print("state keys", state.keys())
193190

194191
ans = self.llm_chain.invoke({"context": context, "question": query})
195192

backend/src/prompts/prompt_templates.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -75,8 +75,7 @@
7575
Given the chat history, rephrase the follow-up question to be a standalone question.\
7676
The rephrased question should include only relevant information inferred from the chat history.\
7777
If the question is already standalone, return the same question.\
78+
Choose the most appropriate tools from the list of tools to answer the rephrased question.\
7879
Return your response as a json blob with 'rephrased_question'.\
7980
80-
Return your response as a JSON blob with 'rephrased_question'.
81-
8281
"""

backend/src/vectorstores/faiss.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -31,7 +31,7 @@ def __init__(
3131
):
3232
self.embeddings_model_name = embeddings_model_name
3333

34-
model_kwargs = {"device": "cuda"} if use_cuda else {}
34+
model_kwargs = {"device": "cuda"} if use_cuda else {"device": "cpu"}
3535

3636
self.embedding_model: Union[
3737
HuggingFaceEmbeddings, GoogleGenerativeAIEmbeddings, VertexAIEmbeddings

0 commit comments

Comments (0)