-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathapp.py
More file actions
38 lines (29 loc) · 1.44 KB
/
app.py
File metadata and controls
38 lines (29 loc) · 1.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import streamlit as st
from main import read_pdf_text
from main import divide_convert_document_chunks
from main import divide_convert_text_chunks
from main import user_input_answer
from main import read_from_url
# Streamlit front-end: lets the user upload PDFs (or paste an article URL),
# builds a FAISS vector index from the content, and answers free-text queries
# against that index via the helpers in main.py.

# Single source of truth for where the FAISS index is persisted.
# NOTE(review): this is a user-specific absolute path with backslashes on what
# looks like a macOS machine — it almost certainly should be a relative path
# like "faiss_index", but it must match whatever user_input_answer() loads
# from in main.py, so confirm before changing.
FAISS_INDEX_PATH = "Macintosh HD\\Users\\arnav\\desktop\\Chatbot\\faiss_index"

st.set_page_config("Query With Multiple PDF files")
st.header("Chat with Google Gemini Pro LLM Model to query multiple pdfs")

# Main query box: answered against the previously built index.
user_query = st.text_input("Ask your query related to your uploaded files...")
if user_query:
    response = user_input_answer(user_query)
    st.write(response)

with st.sidebar:
    st.title("Uploaded Files")
    pdf_docs = st.file_uploader(
        "Upload your files in .pdf format and Click on Submit and process your files",
        accept_multiple_files=True,
    )
    st.write("OR")
    url = st.text_input("Paste the url of any article..")
    if st.button("Submit and Process"):
        with st.spinner("Processing...."):
            if url:
                # URL takes precedence over uploaded files, as in the original flow.
                document = read_from_url(url)
                vector_store = divide_convert_document_chunks(document)
            elif pdf_docs:
                raw_text = read_pdf_text(pdf_docs)
                vector_store = divide_convert_text_chunks(raw_text)
            else:
                # Guard: previously an empty submission fell through to
                # read_pdf_text([]) and still reported success.
                st.warning("Please upload at least one PDF or paste an article URL first.")
                st.stop()
            vector_store.save_local(FAISS_INDEX_PATH)
        # Only reached when an index was actually built and saved.
        st.success("Files Processed. You can continue to Ask your Queries")