Skip to content

Commit 9d6aac3

Browse files
authored
[Feat] OpenAI Assistants (Beta) Integration (#609)
* some working code * refactored and cleaned code * add exception handling * add assistants example notebook with images and pdf * linting * modify notebook name and link * remove agentops github link from notebook * modify image links to github urls * remove agentops content from conclusion section * add assistants examples page * add assistants example to openai integrations page * modify variable name from `original` to `original_func` when patching function * remove casting response to str for `returns` attribute * add partial `LLMEvent` for calculating costs * add logger to error event * check if `usage` is not `None` * add test for assistants api * add more tests * fix typo (no idea how it occurred in the first place)
1 parent 6bf8bd1 commit 9d6aac3

15 files changed

+1874
-2
lines changed

agentops/llms/providers/openai.py

Lines changed: 108 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import inspect
21
import pprint
32
from typing import Optional
43

@@ -16,6 +15,8 @@
1615
class OpenAiProvider(InstrumentedProvider):
1716
original_create = None
1817
original_create_async = None
18+
original_assistant_methods = None
19+
assistants_run_steps = {}
1920

2021
def __init__(self, client):
2122
super().__init__(client)
@@ -138,6 +139,7 @@ async def async_generator():
138139
def override(self):
139140
self._override_openai_v1_completion()
140141
self._override_openai_v1_async_completion()
142+
self._override_openai_assistants_beta()
141143

142144
def _override_openai_v1_completion(self):
143145
from openai.resources.chat import completions
@@ -228,9 +230,114 @@ async def patched_function(*args, **kwargs):
228230
# Override the original method with the patched one
229231
completions.AsyncCompletions.create = patched_function
230232

233+
def _override_openai_assistants_beta(self):
234+
"""Override OpenAI Assistants API methods"""
235+
from openai._legacy_response import LegacyAPIResponse
236+
from openai.resources import beta
237+
from openai.pagination import BasePage
238+
239+
def handle_response(response, kwargs, init_timestamp, session: Optional[Session] = None) -> dict:
240+
"""Handle response based on return type"""
241+
action_event = ActionEvent(init_timestamp=init_timestamp, params=kwargs)
242+
if session is not None:
243+
action_event.session_id = session.session_id
244+
245+
try:
246+
# Set action type and returns
247+
action_event.action_type = (
248+
response.__class__.__name__.split("[")[1][:-1]
249+
if isinstance(response, BasePage)
250+
else response.__class__.__name__
251+
)
252+
action_event.returns = response.model_dump() if hasattr(response, "model_dump") else response
253+
action_event.end_timestamp = get_ISO_time()
254+
self._safe_record(session, action_event)
255+
256+
# Create LLMEvent if usage data exists
257+
response_dict = response.model_dump() if hasattr(response, "model_dump") else {}
258+
259+
if "id" in response_dict and response_dict.get("id").startswith("run"):
260+
if response_dict["id"] not in self.assistants_run_steps:
261+
self.assistants_run_steps[response_dict.get("id")] = {"model": response_dict.get("model")}
262+
263+
if "usage" in response_dict and response_dict["usage"] is not None:
264+
llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
265+
if session is not None:
266+
llm_event.session_id = session.session_id
267+
268+
llm_event.model = response_dict.get("model")
269+
llm_event.prompt_tokens = response_dict["usage"]["prompt_tokens"]
270+
llm_event.completion_tokens = response_dict["usage"]["completion_tokens"]
271+
llm_event.end_timestamp = get_ISO_time()
272+
self._safe_record(session, llm_event)
273+
274+
elif "data" in response_dict:
275+
for item in response_dict["data"]:
276+
if "usage" in item and item["usage"] is not None:
277+
llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
278+
if session is not None:
279+
llm_event.session_id = session.session_id
280+
281+
llm_event.model = self.assistants_run_steps[item["run_id"]]["model"]
282+
llm_event.prompt_tokens = item["usage"]["prompt_tokens"]
283+
llm_event.completion_tokens = item["usage"]["completion_tokens"]
284+
llm_event.end_timestamp = get_ISO_time()
285+
self._safe_record(session, llm_event)
286+
287+
except Exception as e:
288+
self._safe_record(session, ErrorEvent(trigger_event=action_event, exception=e))
289+
290+
kwargs_str = pprint.pformat(kwargs)
291+
response = pprint.pformat(response)
292+
logger.warning(
293+
f"Unable to parse response for Assistants API. Skipping upload to AgentOps\n"
294+
f"response:\n {response}\n"
295+
f"kwargs:\n {kwargs_str}\n"
296+
)
297+
298+
return response
299+
300+
def create_patched_function(original_func):
301+
def patched_function(*args, **kwargs):
302+
init_timestamp = get_ISO_time()
303+
304+
session = kwargs.get("session", None)
305+
if "session" in kwargs.keys():
306+
del kwargs["session"]
307+
308+
response = original_func(*args, **kwargs)
309+
if isinstance(response, LegacyAPIResponse):
310+
return response
311+
312+
return handle_response(response, kwargs, init_timestamp, session=session)
313+
314+
return patched_function
315+
316+
# Store and patch Assistant API methods
317+
assistant_api_methods = {
318+
beta.Assistants: ["create", "retrieve", "update", "delete", "list"],
319+
beta.Threads: ["create", "retrieve", "update", "delete"],
320+
beta.threads.Messages: ["create", "retrieve", "update", "list"],
321+
beta.threads.Runs: ["create", "retrieve", "update", "list", "submit_tool_outputs", "cancel"],
322+
beta.threads.runs.steps.Steps: ["retrieve", "list"],
323+
}
324+
325+
self.original_assistant_methods = {
326+
(cls, method): getattr(cls, method) for cls, methods in assistant_api_methods.items() for method in methods
327+
}
328+
329+
# Override methods and verify
330+
for (cls, method), original_func in self.original_assistant_methods.items():
331+
patched_function = create_patched_function(original_func)
332+
setattr(cls, method, patched_function)
333+
231334
def undo_override(self):
232335
if self.original_create is not None and self.original_create_async is not None:
233336
from openai.resources.chat import completions
234337

235338
completions.AsyncCompletions.create = self.original_create_async
236339
completions.Completions.create = self.original_create
340+
341+
if self.original_assistant_methods is not None:
342+
for (cls, method), original in self.original_assistant_methods.items():
343+
setattr(cls, method, original)

agentops/llms/tracker.py

Lines changed: 35 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,41 @@ class LlmTracker:
2626
SUPPORTED_APIS = {
2727
"litellm": {"1.3.1": ("openai_chat_completions.completion",)},
2828
"openai": {
29-
"1.0.0": ("chat.completions.create",),
29+
"1.0.0": (
30+
"chat.completions.create",
31+
# Assistants
32+
"beta.assistants.create",
33+
"beta.assistants.retrieve",
34+
"beta.assistants.update",
35+
"beta.assistants.delete",
36+
"beta.assistants.list",
37+
"beta.assistants.files.create",
38+
"beta.assistants.files.retrieve",
39+
"beta.assistants.files.delete",
40+
"beta.assistants.files.list",
41+
# Threads
42+
"beta.threads.create",
43+
"beta.threads.retrieve",
44+
"beta.threads.update",
45+
"beta.threads.delete",
46+
# Messages
47+
"beta.threads.messages.create",
48+
"beta.threads.messages.retrieve",
49+
"beta.threads.messages.update",
50+
"beta.threads.messages.list",
51+
"beta.threads.messages.files.retrieve",
52+
"beta.threads.messages.files.list",
53+
# Runs
54+
"beta.threads.runs.create",
55+
"beta.threads.runs.retrieve",
56+
"beta.threads.runs.update",
57+
"beta.threads.runs.list",
58+
"beta.threads.runs.cancel",
59+
"beta.threads.runs.submit_tool_outputs",
60+
# Run Steps
61+
"beta.threads.runs.steps.Steps.retrieve",
62+
"beta.threads.runs.steps.Steps.list",
63+
),
3064
"0.0.0": (
3165
"ChatCompletion.create",
3266
"ChatCompletion.acreate",

docs/v1/examples/examples.mdx

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,10 @@ mode: "wide"
2020
<Card title="Multi Session" icon="computer" href="/v1/examples/multi_session">
2121
Manage multiple sessions at the same time
2222
</Card>
23+
24+
<Card title="OpenAI Assistants" icon={<img src="https://www.github.com/agentops-ai/agentops/blob/main/docs/images/external/openai/openai-logomark.png?raw=true" alt="OpenAI Assistants" />} iconType="image" href="/v1/examples/openai_assistants">
25+
Observe OpenAI Assistants
26+
</Card>
2327
</CardGroup>
2428

2529
### Integration Examples

0 commit comments

Comments (0)