1+ """
2+ File name: assistant_ui.py
3+ Author: Luigi Saetta & Omar Salem
4+ Date last modified: 2025-04-18
5+ Python Version: 3.11
6+
7+ Usage:
8+ streamlit run assistant_ui.py
9+
10+ License:
11+ This code is released under the MIT License.
12+
13+ Notes:
14+ This is part of a series of demos developed using OCI GenAI and LangChain.
15+
16+ Warnings:
17+ This module is in development, may change in future versions.
18+ """
19+
from typing import List, Union
import time
import tempfile

import streamlit as st
import pandas as pd
from langchain_core.messages import HumanMessage, AIMessage

# for APM integration
# from py_zipkin.zipkin import zipkin_span
# from py_zipkin import Encoding

from csv_analyzer_agent import State, MultiAgent
from utils import get_console_logger
from config import DEBUG

# Constants
AGENT_NAME = "AI_DATA_ANALYZER"

# User and assistant roles
USER = "user"
ASSISTANT = "assistant"

logger = get_console_logger()

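# Streamlit reruns this script on every user interaction, so anything that must
# survive a rerun (chat history, uploaded data, the agent) is kept in st.session_state.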
# Initialize session state
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
if "df" not in st.session_state:
    st.session_state.df = None
if "pdf_path" not in st.session_state:
    st.session_state.pdf_path = None
if "extracted_information" not in st.session_state:
    st.session_state.extracted_information = None
if "agent" not in st.session_state:
    # Initialize the multi-agent system
    st.session_state.agent = MultiAgent()


def display_msg_on_rerun(chat_hist: List[Union[HumanMessage, AIMessage]]) -> None:
    """Display all messages on rerun."""
    for msg in chat_hist:
        role = USER if isinstance(msg, HumanMessage) else ASSISTANT
        with st.chat_message(role):
            st.markdown(msg.content)


def reset_conversation():
    """Clear the chat history kept in the session state."""
    st.session_state.chat_history = []


def add_to_chat_history(msg):
    """Append a message (HumanMessage or AIMessage) to the chat history."""
    st.session_state.chat_history.append(msg)


def get_chat_history():
    """Return the chat history kept in the session state."""
    return st.session_state.chat_history


def display_extracted_data():
    """Show the information extracted from the PDF in the sidebar, if available."""
    if st.session_state.extracted_information:
        st.sidebar.subheader("📄 Extracted PDF Information")
        extracted = st.session_state.extracted_information.data
        for key, value in extracted.items():
            label = f"**{key.replace('_', ' ').title()}:**"
            if isinstance(value, (dict, list)):
                st.sidebar.markdown(label)
                st.sidebar.json(value)
            else:
                st.sidebar.markdown(f"{label} {value}")


st.title("AI Data Analyzer")

if st.sidebar.button("Clear Chat History"):
    reset_conversation()

st.sidebar.header("Options")

model_id = st.sidebar.selectbox("Select the Chat Model", ["meta.llama3.3-70B"])

# Upload CSV
uploaded_file = st.sidebar.file_uploader("Load a CSV file", type=["csv"])

if uploaded_file:
    try:
        df = pd.read_csv(uploaded_file)
        st.session_state.df = df
        st.dataframe(df)
    except Exception as e:
        st.error(f"Error reading the CSV file: {e}")
else:
    st.session_state.df = None

# Upload PDF
uploaded_pdf = st.sidebar.file_uploader("Upload a PDF file", type=["pdf"])

if uploaded_pdf:
    try:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_pdf:
            temp_pdf.write(uploaded_pdf.getvalue())
            temp_pdf_path = temp_pdf.name

        st.session_state.pdf_path = temp_pdf_path
        st.session_state.extracted_information = None

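        # A State with an empty user_request is built here only to run the
        # PDF-extraction node directly, outside the full question-answering workflow.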
        pdf_state = State(
            user_request="",
            pdf_path=temp_pdf_path,
            extracted_information=None,
            input_df=st.session_state.df,
            chat_history=st.session_state.chat_history,
            previous_error=None,
            error=None,
        )

        extracted_info = st.session_state.agent.process_pdf_node(pdf_state)

        if extracted_info["extracted_information"]:
            st.session_state.extracted_information = extracted_info["extracted_information"]
            st.toast("✅ PDF Processed Successfully!")
        else:
            st.error("⚠️ Failed to extract information from PDF.")

    except Exception as e:
        st.error(f"Error during PDF processing: {e}")
else:
    st.session_state.extracted_information = None
    st.session_state.pdf_path = None

# Always display the extracted PDF information in the sidebar, if available
display_extracted_data()

# Display the chat history on every rerun
display_msg_on_rerun(get_chat_history())

#
# Chat Input Handling
#
if question := st.chat_input("Hello, how can I help you?"):
    st.chat_message(USER).markdown(question)
    add_to_chat_history(HumanMessage(content=question))  # ✅ Store user message

    try:
        with st.spinner("Calling AI..."):
            time_start = time.time()

            app = st.session_state.agent.create_workflow()
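            # The runnable workflow is rebuilt for every question; the MultiAgent
            # instance itself is reused from the session state across reruns.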

            state = State(
                user_request=question,
                input_df=st.session_state.df,
                extracted_information=st.session_state.extracted_information or {},
                pdf_path=st.session_state.pdf_path,
                chat_history=st.session_state.chat_history.copy(),
                error=None,
                previous_error=None,
            )

            results = []
            error = None
            # Uncomment to enable Zipkin tracing (note: it adds some latency).
            # with zipkin_span(
            #     service_name=AGENT_NAME,
            #     span_name="call",
            #     transport_handler=http_transport,
            #     encoding=Encoding.V2_JSON,
            #     sample_rate=100,
            # ):

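            # Each streamed event maps the name of a completed node to that node's
            # output; progress is shown via toasts and the outputs are collected so
            # the final answer can be read from the last one.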
            for event in app.stream(state):
                for key, value in event.items():
                    logger.info(f"Completed: {key}")
                    st.toast(f"Completed: {key}")
                    results.append(value)
                    error = value.get("error")

                    if key == "CodeGenerator" and error is None:
                        st.sidebar.header("Generated Code:")
                        st.sidebar.code(value["code_generated"], language="python")

            if error is None:
                final_result = results[-1]["final_output"]
                if isinstance(final_result, pd.DataFrame):
                    st.dataframe(final_result)
                    if DEBUG:
                        logger.info(final_result.head(10))
                else:
                    with st.chat_message(ASSISTANT):
                        st.markdown(final_result)
                    add_to_chat_history(AIMessage(content=final_result))  # Store AI response
            else:
                st.error(error)

            elapsed_time = round((time.time() - time_start), 1)
            logger.info(f"Elapsed time: {elapsed_time} sec.")

    except Exception as e:
        logger.error(f"Error occurred: {e}")
        st.error(f"An error occurred: {e}")