@@ -99,7 +99,9 @@ def _instrument(self, **kwargs):

         register_post_import_hook(self._patch, "openai")

-    def _patch(self, _module):
+    def _patch(self, module):
+        version = tuple([int(x) for x in getattr(getattr(module, "version"), "VERSION").split(".")])
Contributor Author

Let me know if this is the sensible pattern for patching newer modules

@xrmx (Member), Mar 18, 2025

You can use module.version.VERSION directly, though, since you are not passing a default to getattr.
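
As a sketch of that simplification (same split-and-compare logic as the diff, just without the getattr calls; openai does expose version.VERSION, which the diff itself relies on):

```python
version = tuple(int(x) for x in module.version.VERSION.split("."))
```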

The problem with this approach is that when these are promoted out of beta, I expect the version check will still pass but the patching will fail. A more robust approach would be to try to wrap anyway, catch the ModuleNotFoundError and AttributeError exceptions, and set the beta_chat_available boolean accordingly (see the sketch below).
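
A minimal sketch of that suggestion, assuming wrapt's wrap_function_wrapper and the wrapper methods shown in this diff; the exact placement inside _patch is illustrative:

```python
from wrapt import wrap_function_wrapper


def _patch(self, module):
    # Try to wrap the beta chat completions API unconditionally; if this
    # openai version does not ship the module or the parse methods, fall
    # back instead of relying only on a version comparison.
    try:
        wrap_function_wrapper(
            "openai.resources.beta.chat.completions",
            "Completions.parse",
            self._chat_completion_wrapper,
        )
        wrap_function_wrapper(
            "openai.resources.beta.chat.completions",
            "AsyncCompletions.parse",
            self._async_chat_completion_wrapper,
        )
        self.beta_chat_available = True
    except (ModuleNotFoundError, AttributeError):
        self.beta_chat_available = False
```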

+        self.beta_chat_available = version >= (1, 40, 0)
         wrap_function_wrapper(
             "openai.resources.chat.completions",
             "Completions.create",
@@ -110,6 +112,17 @@ def _patch(self, _module):
             "AsyncCompletions.create",
             self._async_chat_completion_wrapper,
         )
+        if self.beta_chat_available:
+            wrap_function_wrapper(
+                "openai.resources.beta.chat.completions",
+                "Completions.parse",
+                self._chat_completion_wrapper,
+            )
+            wrap_function_wrapper(
+                "openai.resources.beta.chat.completions",
+                "AsyncCompletions.parse",
+                self._async_chat_completion_wrapper,
+            )
         wrap_function_wrapper(
             "openai.resources.embeddings",
             "Embeddings.create",
@@ -128,11 +141,14 @@ def _uninstrument(self, **kwargs):

         unwrap(openai.resources.chat.completions.Completions, "create")
         unwrap(openai.resources.chat.completions.AsyncCompletions, "create")
+        if self.beta_chat_available:
+            unwrap(openai.resources.beta.chat.completions.Completions, "parse")
+            unwrap(openai.resources.beta.chat.completions.AsyncCompletions, "parse")
         unwrap(openai.resources.embeddings.Embeddings, "create")
         unwrap(openai.resources.embeddings.AsyncEmbeddings, "create")

     def _chat_completion_wrapper(self, wrapped, instance, args, kwargs):
-        logger.debug(f"openai.resources.chat.completions.Completions.create kwargs: {kwargs}")
+        logger.debug(f"{wrapped} kwargs: {kwargs}")

         span_attributes = _get_attributes_from_wrapper(instance, kwargs)
         event_attributes = _get_event_attributes()