from langchain.memory import ConversationBufferWindowMemory
from langchain_openai import OpenAI
from langchain.memory import ConversationBufferWindowMemory
-from langchain_core.prompts import ChatPromptTemplate
+from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
import logging
from langchain_core.prompts.chat import (
    SystemMessagePromptTemplate,
@@ -50,7 +50,9 @@ def get_llm():
def delete_chat_message_history(session):
    result = False
    try:
-        chat_message_history: MongoDBChatMessageHistory = get_chat_message_history(session)
+        chat_message_history: MongoDBChatMessageHistory = get_chat_message_history(
+            session
+        )
        chat_message_history.clear()
        result = True
    except Exception as e:
@@ -65,22 +67,25 @@ def get_chat_message_history(session):
        database_name=MONGO_DB_NAME,
        collection_name="chat_histories",
    )
+    app.logger.debug("Chat message history %s", chat_message_history)
    return chat_message_history


def get_qa_chain(llm, retriever, session):
    system_prompt_template = """
    You are a helpful AI Assistant. Your name is CrapBot.
-    {summaries}
-    Previous Conversations till now: {chat_history}
    Respond to the user questions and do not withhold any information
-    """
+    Use the following pieces of retrieved context:
+    {summaries}
+    Previous conversation:
+    """
+    chat_prompt_template = "{chat_history}"
    human_prompt_template = "{question}."
    chatbot_prompt_template = "CrapBot:"
    messages = [
-        SystemMessagePromptTemplate.from_template(system_prompt_template),
-        HumanMessagePromptTemplate.from_template(human_prompt_template),
-        AIMessagePromptTemplate.from_template(chatbot_prompt_template),
+        ("system", system_prompt_template),
+        ("placeholder", chat_prompt_template),
+        ("human", human_prompt_template),
    ]

    PROMPT = ChatPromptTemplate.from_messages(
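The tuple-style messages use langchain-core's shorthand: a ("system", ...) or ("human", ...) pair becomes the corresponding message template, and ("placeholder", "{chat_history}") expands to a MessagesPlaceholder, which is why that import is added in this commit. A minimal sketch of the same prompt written out explicitly; the retriever/chain wiring around it is assumed, not shown in this hunk:

```python
# Sketch only: the prompt from the diff above, with the placeholder spelled out.
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

system_prompt_template = """
You are a helpful AI Assistant. Your name is CrapBot.
Respond to the user questions and do not withhold any information
Use the following pieces of retrieved context:
{summaries}
Previous conversation:
"""

PROMPT = ChatPromptTemplate.from_messages(
    [
        ("system", system_prompt_template),
        # The tuple form ("placeholder", "{chat_history}") expands to a
        # placeholder like this one.
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{question}."),
    ]
)
```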
@@ -105,8 +110,9 @@ def get_qa_chain(llm, retriever, session):
    return qa


-def qa_answer(model, query):
-    result = model.invoke(query)
+def qa_answer(model, session, query):
+    result = model.invoke({"question": query})
+    app.logger.debug("Result %s", result)
    app.logger.debug("Answering question %s", result["answer"])
    return result["answer"]

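Since qa_answer now calls model.invoke({"question": query}), inputs flow through the chain as a dict, and the conversation history reaches the prompt as a list of messages under the chat_history key. A small illustrative sketch of how such a placeholder slot is filled at render time, with made-up history values (in the app they come from MongoDBChatMessageHistory, not hard-coded messages):

```python
# Illustrative only: filling a ("placeholder", "{chat_history}") slot at invoke time.
from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "Context: {summaries}\nPrevious conversation:"),
        ("placeholder", "{chat_history}"),
        ("human", "{question}."),
    ]
)

rendered = prompt.invoke(
    {
        "summaries": "retrieved document snippets go here",
        "chat_history": [
            HumanMessage(content="Hi, who are you?"),
            AIMessage(content="I am CrapBot."),
        ],
        "question": "What documents can you see",
    }
)
print(rendered.to_messages())
```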
@@ -199,7 +205,7 @@ def ask_bot():
    question = request.json["question"]
    llm = get_llm()
    model = get_qa_chain(llm, retriever_l, session)
-    answer = qa_answer(model, question)
+    answer = qa_answer(model, session, question)
    app.logger.info("###########################################")
    app.logger.info("Attacker Question:: %s", question)
    app.logger.info("App Answer:: %s", answer)
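For reference, the updated flow can be exercised through the Flask view. The route path and port below are guesses (the @app.route decorator is outside this diff); only the {"question": ...} JSON shape is taken from the code above:

```python
# Hypothetical client call; replace "/ask" and the port with whatever the
# route decorator on ask_bot() actually uses.
import requests

resp = requests.post(
    "http://localhost:5000/ask",
    json={"question": "What documents do you have access to?"},
)
print(resp.status_code, resp.text)
```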