-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp_gui_ensemble.py
More file actions
80 lines (67 loc) · 3.18 KB
/
app_gui_ensemble.py
File metadata and controls
80 lines (67 loc) · 3.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
# Import Gradio for UI
import gradio as gr
from rag_app.chains.ensemble_qa_chain import qae
if __name__ == "__main__":
# Record a freshly submitted user message in the conversation log.
def add_text(history, text):
    """Append *text* to *history* as a new user turn with no reply yet.

    Returns the extended history and an empty string, which Gradio uses
    to clear the input textbox after submission.
    """
    pending_turn = (text, None)
    return history + [pending_turn], ""
# Produce the assistant reply for the most recent user message.
def bot(history):
    """Fill in the reply slot of the last turn in *history*.

    Queries the RAG chain via ``infer``, then appends the "Section"
    metadata of each source document to the answer text.
    """
    response = infer(history[-1][0], history)
    # BUG FIX: metadata.get("Section") returns None for documents that
    # lack the key, and '\n'.join(...) raises TypeError on None — drop
    # missing sections instead of crashing.
    sections = [doc.metadata.get("Section") for doc in response['source_documents']]
    src_list = '\n'.join(s for s in sections if s)
    print_this = response['result']+"\n\n\n Sources: \n\n\n"+src_list
    history[-1][1] = print_this
    return history
# Forward a question (plus chat context) to the ensemble QA chain.
def infer(question, history):
    """Query the RAG ensemble chain and return its raw result dict."""
    # Both "query" and "question" carry the same text — presumably the
    # chain's components expect different key names; verify against qae.
    payload = {
        "query": question,
        "question": question,
        "history": history,
    }
    return qae(payload)
# CSS for the Gradio page: caps the main column width and centers it.
css = """
#col-container {max-width: 1200px; margin-left: auto; margin-right: auto;}
"""
# Greeting banner shown above the chat.
# BUG FIX: the original markup left the <p> tag unclosed and read
# "I BotTina" — close the tag and fix the grammar in the visible text.
title = """
<div style="text-align:left;">
<p>Hello, I'm BotTina 2.0, your intelligent AI assistant. <br /></p>
</div>
"""
# Extra <head> markup: on very wide screens, let the Gradio container
# expand to the full available width instead of its default max-width.
head_style = """
<style>
@media (min-width: 1536px)
{
.gradio-container {
min-width: var(--size-full) !important;
}
}
</style>
"""
# Assemble and launch the Gradio chat interface.
with gr.Blocks(theme=gr.themes.Soft(), title="RFP AI Analyzer 🤵🏻♂️", head=head_style) as demo:
    with gr.Column(elem_id="col-container"):
        # BUG FIX: `title` was defined but never used — gr.HTML() was
        # called with no content, so the banner rendered empty. Pass the
        # HTML so the greeting actually shows.
        gr.HTML(title)
        chatbot = gr.Chatbot(
            [],
            elem_id="chatbot",
            label="RFP Ensemble Analyzer",
            bubble_full_width=False,
            avatar_images=(None, "https://dacodi-production.s3.amazonaws.com/store/87bc00b6727589462954f2e3ff6f531c.png"),
            height=600,
        )
        # Input row: question textbox plus an explicit Send button.
        with gr.Row():
            question = gr.Textbox(label="Question", show_label=False, placeholder="Type your question and hit Enter ", scale=4)
            send_btn = gr.Button(value="Send", variant="primary", scale=0)
        with gr.Row():
            clear = gr.Button("Clear")
        # Enter key and Send button do the same thing: record the user
        # turn immediately (queue=False), then chain the bot reply into
        # the chatbot component.
        question.submit(add_text, [chatbot, question], [chatbot, question], queue=False).then(
            bot, chatbot, chatbot)
        send_btn.click(add_text, [chatbot, question], [chatbot, question], queue=False).then(
            bot, chatbot, chatbot)
        # Clear resets the chatbot component to empty.
        clear.click(lambda: None, None, chatbot, queue=False)
# Queuing enables the .then() event chains; blocks serving on port 7863.
demo.queue().launch(share=False, debug=True, server_port=7863)