Commit 904b09b

Merge branch 'main' into srihari-r-iamreadme-1
2 parents a718aed + 9ec837e commit 904b09b

File tree: 152 files changed (+15592, -299 lines)

Some content is hidden by default in large commits.


.DS_Store (6 KB): binary file not shown.

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -2,3 +2,4 @@
 shared-assets/bastion-py-script/.oci/
 shared-assets/bastion-py-script/temp/
 temp/
+app-dev/app-integration-and-automation/oracle-integration-cloud/01-oic-connectivity-agent/README_tmp.html
Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
Copyright (c) 2025 Oracle and/or its affiliates.

The Universal Permissive License (UPL), Version 1.0

Subject to the condition set forth below, permission is hereby granted to any
person obtaining a copy of this software, associated documentation and/or data
(collectively the "Software"), free of charge and under any and all copyright
rights in the Software, and any and all patent rights owned or freely
licensable by each licensor hereunder covering either (i) the unmodified
Software as contributed to or provided by such licensor, or (ii) the Larger
Works (as defined below), to deal in both

(a) the Software, and
(b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
one is included with the Software (each a "Larger Work" to which the Software
is contributed by such licensors),

without restriction, including without limitation the rights to copy, create
derivative works of, display, perform, and distribute the Software and make,
use, sell, offer for sale, import, export, have made, and have sold the
Software and the Larger Work(s), and to sublicense the foregoing rights on
either these or other terms.

This license is subject to the following condition:
The above copyright notice and either this complete permission notice or at
a minimum a reference to the UPL must be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Lines changed: 213 additions & 0 deletions
@@ -0,0 +1,213 @@
"""
File name: assistant_ui.py
Author: Luigi Saetta & Omar Salem
Date last modified: 2025-04-18
Python Version: 3.11

Usage:
    streamlit run assistant_ui.py

License:
    This code is released under the MIT License.

Notes:
    This is part of a series of demos developed using OCI GenAI and LangChain.

Warnings:
    This module is in development and may change in future versions.
"""

from typing import List, Union
import time
import tempfile

import streamlit as st
import pandas as pd
from langchain_core.messages import HumanMessage, AIMessage

# for APM integration
# from py_zipkin.zipkin import zipkin_span
# from py_zipkin import Encoding

from csv_analyzer_agent import State, MultiAgent
from utils import get_console_logger
from config import DEBUG

# Constants
AGENT_NAME = "AI_DATA_ANALYZER"

# User and assistant roles
USER = "user"
ASSISTANT = "assistant"

logger = get_console_logger()

# Initialize session state
if "chat_history" not in st.session_state:
    st.session_state.chat_history = []
if "df" not in st.session_state:
    st.session_state.df = None
if "pdf_path" not in st.session_state:
    st.session_state.pdf_path = None
if "extracted_information" not in st.session_state:
    st.session_state.extracted_information = None
if "agent" not in st.session_state:
    # Initialize the multi-agent system
    st.session_state.agent = MultiAgent()


def display_msg_on_rerun(chat_hist: List[Union[HumanMessage, AIMessage]]) -> None:
    """Display all messages on rerun."""
    for msg in chat_hist:
        role = USER if isinstance(msg, HumanMessage) else ASSISTANT
        with st.chat_message(role):
            st.markdown(msg.content)


def reset_conversation():
    """Clear the chat history."""
    st.session_state.chat_history = []


def add_to_chat_history(msg):
    """Append a message to the chat history."""
    st.session_state.chat_history.append(msg)


def get_chat_history():
    """Return the current chat history."""
    return st.session_state.chat_history


def display_extracted_data():
    """Show the information extracted from the uploaded PDF in the sidebar."""
    if st.session_state.extracted_information:
        st.sidebar.subheader("📄 Extracted PDF Information")
        extracted = st.session_state.extracted_information.data
        for key, value in extracted.items():
            label = f"**{key.replace('_', ' ').title()}:**"
            if isinstance(value, (dict, list)):
                st.sidebar.markdown(label)
                st.sidebar.json(value)
            else:
                st.sidebar.markdown(f"{label} {value}")


st.title("AI Data Analyzer")

if st.sidebar.button("Clear Chat History"):
    reset_conversation()

st.sidebar.header("Options")

model_id = st.sidebar.selectbox("Select the Chat Model", ["meta.llama3.3-70B"])

# Upload CSV
uploaded_file = st.sidebar.file_uploader("Load a CSV file", type=["csv"])

if uploaded_file:
    try:
        df = pd.read_csv(uploaded_file)
        st.session_state.df = df
        st.dataframe(df)
    except Exception as e:
        st.error(f"Error reading the CSV file: {e}")
else:
    st.session_state.df = None

# Upload PDF
uploaded_pdf = st.sidebar.file_uploader("Upload a PDF file", type=["pdf"])

if uploaded_pdf:
    try:
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_pdf:
            temp_pdf.write(uploaded_pdf.getvalue())
            temp_pdf_path = temp_pdf.name

        st.session_state.pdf_path = temp_pdf_path
        st.session_state.extracted_information = None

        pdf_state = State(
            user_request="",
            pdf_path=temp_pdf_path,
            extracted_information=None,
            input_df=st.session_state.df,
            chat_history=st.session_state.chat_history,
            previous_error=None,
            error=None,
        )

        extracted_info = st.session_state.agent.process_pdf_node(pdf_state)

        if extracted_info["extracted_information"]:
            st.session_state.extracted_information = extracted_info["extracted_information"]
            st.toast("✅ PDF Processed Successfully!")
        else:
            st.error("⚠️ Failed to extract information from PDF.")

    except Exception as e:
        st.error(f"Error during PDF processing: {e}")
else:
    st.session_state.extracted_information = None
    st.session_state.pdf_path = None

# Always display the extracted PDF info in the sidebar when available
display_extracted_data()

# Display the chat history on rerun
display_msg_on_rerun(get_chat_history())

#
# Chat Input Handling
#
if question := st.chat_input("Hello, how can I help you?"):
    st.chat_message(USER).markdown(question)
    add_to_chat_history(HumanMessage(content=question))  # Store the user message

    try:
        with st.spinner("Calling AI..."):
            time_start = time.time()

            app = st.session_state.agent.create_workflow()

            state = State(
                user_request=question,
                input_df=st.session_state.df,
                extracted_information=st.session_state.extracted_information or {},
                pdf_path=st.session_state.pdf_path,
                chat_history=st.session_state.chat_history.copy(),
                error=None,
                previous_error=None,
            )

            results = []
            error = None

            # Uncomment to trace the call with Zipkin; note that tracing makes it slower.
            # with zipkin_span(
            #     service_name=AGENT_NAME,
            #     span_name="call",
            #     transport_handler=http_transport,
            #     encoding=Encoding.V2_JSON,
            #     sample_rate=100,
            # ):

            for event in app.stream(state):
                for key, value in event.items():
                    logger.info(f"Completed: {key}")
                    st.toast(f"Completed: {key}")
                    results.append(value)
                    error = value.get("error")

                    if key == "CodeGenerator" and error is None:
                        st.sidebar.header("Generated Code:")
                        st.sidebar.code(value["code_generated"], language="python")

            if error is None:
                final_result = results[-1]["final_output"]
                if isinstance(final_result, pd.DataFrame):
                    st.dataframe(final_result)
                    if DEBUG:
                        logger.info(final_result.head(10))
                else:
                    with st.chat_message(ASSISTANT):
                        st.markdown(final_result)
                    add_to_chat_history(AIMessage(content=final_result))  # Store the AI response
            else:
                st.error(error)

            elapsed_time = round((time.time() - time_start), 1)
            logger.info(f"Elapsed time: {elapsed_time} sec.")

    except Exception as e:
        logger.error(f"Error occurred: {e}")
        st.error(f"An error occurred: {e}")
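Note: the commented-out tracing block above passes an http_transport handler that is not defined in this file. A minimal sketch of such a handler, assuming the requests library and that the APM_* values in config.py point at a Zipkin-compatible collector (the dataKey query parameter is an assumption, not taken from the repo):

import requests

from config import APM_BASE_URL, APM_CONTENT_TYPE, APM_PUBLIC_KEY


def http_transport(encoded_span: bytes):
    """Ship a py_zipkin-encoded span to the APM collector (hypothetical endpoint wiring)."""
    return requests.post(
        APM_BASE_URL,
        params={"dataKey": APM_PUBLIC_KEY},  # assumption: the collector authenticates via a data key
        data=encoded_span,
        headers={"Content-Type": APM_CONTENT_TYPE},
    )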
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
[DEFAULT]
user=ocid1.user.oc
fingerprint=c6:4f:66:e7:
tenancy=ocid1.tenancy.oc
region=eu-frankfurt-1
key_file=~/.oci/
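This is a standard OCI API-key configuration profile (the OCIDs, fingerprint, and key path are truncated here). A minimal sketch of loading and validating such a profile with the OCI Python SDK, assuming it sits at the default ~/.oci/config location:

import oci

# Load the DEFAULT profile from the standard location and sanity-check it
config = oci.config.from_file(file_location="~/.oci/config", profile_name="DEFAULT")
oci.config.validate_config(config)

print(config["region"])  # expected: eu-frankfurt-1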
Lines changed: 33 additions & 0 deletions
@@ -0,0 +1,33 @@
"""
OCI models configuration and general config
"""

DEBUG = False

MODEL_ID = "meta.llama-3.3-70b-instruct"

AUTH = "API_KEY"
SERVICE_ENDPOINT = "https://inference.generativeai.eu-frankfurt-1.oci.oraclecloud.com"

TEMPERATURE = 0.1
MAX_TOKENS = 1024
TOP_P = 0.9

# OCI general
COMPARTMENT_ID = "ocid1.compar"

# history management
MAX_MSGS_IN_HISTORY = 10
# kept low, since we're generating code
MAX_ROWS_IN_SAMPLE = 10

RAG_AGENT_ID = "ocid1.genaia"
RAG_AGENT_ENDPOINT = (
    "https://agent-runtime.generativeai.uk-london-1.oci.oraclecloud.com"
)

# integration with APM
ENABLE_TRACING = True
APM_BASE_URL = "https://aaa"
APM_CONTENT_TYPE = "application/json"
APM_PUBLIC_KEY = "6OXZ45B="
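These settings are consumed elsewhere in the demo (the agent and utility modules are not shown in this excerpt). A minimal sketch of how they might be wired into a LangChain chat model, assuming the langchain_community OCI Generative AI integration; the actual construction in the repo may differ:

from langchain_community.chat_models.oci_generative_ai import ChatOCIGenAI

from config import (
    AUTH,
    COMPARTMENT_ID,
    MAX_TOKENS,
    MODEL_ID,
    SERVICE_ENDPOINT,
    TEMPERATURE,
    TOP_P,
)

# Build the OCI GenAI chat model from the values defined above
llm = ChatOCIGenAI(
    model_id=MODEL_ID,
    service_endpoint=SERVICE_ENDPOINT,
    compartment_id=COMPARTMENT_ID,
    auth_type=AUTH,
    model_kwargs={"temperature": TEMPERATURE, "max_tokens": MAX_TOKENS, "top_p": TOP_P},
)

print(llm.invoke("Hello!").content)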
