1
import gradio as gr
import os
from typing import List, Dict, Any
from pathlib import Path
import tempfile
from dotenv import load_dotenv
import yaml

# Project-local processors, vector store, and the two agent backends.
from pdf_processor import PDFProcessor
from web_processor import WebProcessor
from store import VectorStore
from local_rag_agent import LocalRAGAgent
from rag_agent import RAGAgent

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
load_dotenv()
17
+
18
def load_config():
    """Load the HuggingFace Hub token from config.yaml.

    Returns:
        The value of the ``HUGGING_FACE_HUB_TOKEN`` key, or ``None`` when the
        file is missing, unreadable, malformed, or does not define the key.
    """
    try:
        with open('config.yaml', 'r') as f:
            config = yaml.safe_load(f)
        # An empty YAML file parses to None (and a scalar/list has no .get);
        # the original crashed into the except handler here — treat both as
        # "no token configured" instead.
        if not isinstance(config, dict):
            return None
        return config.get('HUGGING_FACE_HUB_TOKEN')
    except (OSError, yaml.YAMLError) as e:
        # Narrowed from bare Exception: only I/O and YAML-parse failures are
        # expected; anything else should surface as a real bug.
        print(f"Error loading config: {str(e)}")
        return None
27
+
28
# Initialize document-processing components shared by every handler below.
pdf_processor = PDFProcessor()
web_processor = WebProcessor()
vector_store = VectorStore()

# Initialize agents.
# hf_token comes from config.yaml; openai_key from the environment (.env).
hf_token = load_config()
openai_key = os.getenv("OPENAI_API_KEY")

# Each agent is only constructed when its credential is present; the chat
# handler checks for None before use.
local_agent = LocalRAGAgent(vector_store) if hf_token else None
openai_agent = RAGAgent(vector_store, openai_api_key=openai_key) if openai_key else None
39
+
40
def process_pdf(file: Any) -> str:
    """Process an uploaded PDF file and add its chunks to the knowledge base.

    Args:
        file: Uploaded-file handle from ``gr.File``; only ``file.name`` (the
            path on disk) is used. Annotated ``Any`` instead of the private
            ``tempfile._TemporaryFileWrapper`` type the original referenced.

    Returns:
        A human-readable status message: success with the chunk count, or an
        error description.
    """
    try:
        # Gradio passes None when the button is clicked with no file selected;
        # give a clear message instead of an AttributeError on `.name`.
        if file is None:
            return "✗ Error processing PDF: no file was uploaded"
        chunks = pdf_processor.process_pdf(file.name)
        vector_store.add_pdf_chunks(chunks)
        return f"✓ Successfully processed PDF and added {len(chunks)} chunks to knowledge base"
    except Exception as e:
        # Broad catch is deliberate: any failure becomes a UI status message.
        return f"✗ Error processing PDF: {str(e)}"
48
+
49
def process_url(url: str) -> str:
    """Fetch a web page and add its extracted content to the knowledge base.

    Args:
        url: Address of the page to fetch and index.

    Returns:
        A human-readable status message describing success or failure.
    """
    try:
        page_content = web_processor.process_url(url)
        vector_store.add_web_content(page_content)
    except Exception as exc:
        # Any failure is reported back to the UI rather than raised.
        return f"✗ Error processing URL: {str(exc)}"
    return f"✓ Successfully processed URL and added content to knowledge base"
57
+
58
def chat(message: str, history: List[List[str]], agent_type: str, use_cot: bool) -> List[List[str]]:
    """Handle one chat turn and return the updated conversation history.

    Args:
        message: The user's new message.
        history: Prior conversation as [user, assistant] pairs; may be None
            for an empty Chatbot.
        agent_type: "Local (Mistral)" selects the local agent; anything else
            selects the OpenAI agent.
        use_cot: Whether to enable chain-of-thought reasoning on the agent.

    Returns:
        The history extended with the new [message, answer] pair. This is what
        the wired ``gr.Chatbot`` output expects to render — the original
        returned a bare answer string, which a Chatbot component cannot
        display as conversation history.
    """
    history = history or []
    try:
        # Select the agent matching the dropdown choice.
        agent = local_agent if agent_type == "Local (Mistral)" else openai_agent
        if not agent:
            return history + [[message, "Agent not available. Please check your configuration."]]

        # NOTE(review): this mutates the shared agent instance; concurrent
        # sessions with different CoT settings would race — assumes a
        # single-user deployment, confirm.
        agent.use_cot = use_cot

        response = agent.process_query(message)
        return history + [[message, response["answer"]]]
    except Exception as e:
        # Surface failures as an assistant message instead of crashing the UI.
        return history + [[message, f"Error processing query: {str(e)}"]]
74
+
75
def create_interface():
    """Create Gradio interface"""
    # Blocks layout: a header, two tabs (ingestion + chat), event wiring,
    # and a usage-instructions footer.
    with gr.Blocks(title="Agentic RAG System", theme=gr.themes.Soft()) as interface:
        gr.Markdown("""
        # 🤖 Agentic RAG System
        
        Upload PDFs, process web content, and chat with your documents using local or OpenAI models.
        """)
        
        # Tab 1: document ingestion — PDF upload and URL scraping side by side.
        with gr.Tab("Document Processing"):
            with gr.Row():
                with gr.Column():
                    pdf_file = gr.File(label="Upload PDF")
                    pdf_button = gr.Button("Process PDF")
                    pdf_output = gr.Textbox(label="PDF Processing Output")
                
                with gr.Column():
                    url_input = gr.Textbox(label="Enter URL")
                    url_button = gr.Button("Process URL")
                    url_output = gr.Textbox(label="URL Processing Output")
        
        # Tab 2: chat over the indexed documents.
        with gr.Tab("Chat Interface"):
            # Only offer the OpenAI option when an API key was configured at
            # module load; the dropdown value feeds `chat`'s agent selection.
            agent_dropdown = gr.Dropdown(
                choices=["Local (Mistral)", "OpenAI"] if openai_key else ["Local (Mistral)"],
                value="Local (Mistral)",
                label="Select Agent"
            )
            cot_checkbox = gr.Checkbox(label="Enable Chain of Thought Reasoning", value=False)
            chatbot = gr.Chatbot(height=400)
            msg = gr.Textbox(label="Your Message")
            clear = gr.Button("Clear Chat")
        
        # Event handlers
        pdf_button.click(process_pdf, inputs=[pdf_file], outputs=[pdf_output])
        url_button.click(process_url, inputs=[url_input], outputs=[url_output])
        # NOTE(review): `chat` is wired with the Chatbot as its only output —
        # confirm its return value is the history format gr.Chatbot renders.
        msg.submit(chat, inputs=[msg, chatbot, agent_dropdown, cot_checkbox], outputs=[chatbot])
        # Clearing just resets the Chatbot component to empty.
        clear.click(lambda: None, None, chatbot, queue=False)
        
        # Instructions
        gr.Markdown("""
        ## Instructions
        
        1. **Document Processing**:
           - Upload PDFs using the file uploader
           - Process web content by entering URLs
           - All processed content is added to the knowledge base
        
        2. **Chat Interface**:
           - Select your preferred agent (Local Mistral or OpenAI)
           - Toggle Chain of Thought reasoning for more detailed responses
           - Chat with your documents using natural language
        
        Note: OpenAI agent requires an API key in `.env` file
        """)
    
    return interface
131
+
132
def main():
    """Entry point: warn about missing credentials, then launch the web UI."""
    # With neither credential configured, both agents are None and every chat
    # request will report "Agent not available" — warn up front.
    if not hf_token and not openai_key:
        print("⚠️ Warning: Neither HuggingFace token nor OpenAI key found. Please configure at least one.")

    # Bind on all interfaces on port 7860; share=True additionally publishes
    # a public gradio.live URL, and inbrowser=True opens a local browser tab.
    app = create_interface()
    app.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=True,
        inbrowser=True
    )

if __name__ == "__main__":
    main()