Skip to content
72 changes: 60 additions & 12 deletions agentops/llms/tracker.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import inspect
import sys
from importlib import import_module
from importlib.metadata import version
Expand Down Expand Up @@ -99,15 +100,60 @@

def __init__(self, client):
    """Initialize the LLM tracker for the given AgentOps client."""
    # The AgentOps client whose sessions/events the patched providers report to.
    self.client = client
    # Set to True by override_api() once the LiteLLM provider has been patched.
    # NOTE(review): the consumer of this flag is not visible in this chunk —
    # presumably used to skip double-instrumenting OpenAI; confirm against callers.
    self.litellm_initialized = False
def _is_litellm_call(self):
"""
Detects if the API call originated from LiteLLM.

**Issue We Are Addressing:**
- When using LiteLLM, it internally calls OpenAI methods, which results in OpenAI being initialized by default.
- This creates an issue where OpenAI is tracked as the primary provider, even when the request was routed via LiteLLM.
- We need to ensure that OpenAI is only tracked if it was explicitly used and **not** invoked indirectly through LiteLLM.

**How This Works:**
- The function checks the call stack (execution history) to determine the order in which modules were called.
- If LiteLLM appears in the call stack **before** OpenAI, then OpenAI was invoked via LiteLLM, meaning we should ignore OpenAI.
- If OpenAI appears first without LiteLLM, then OpenAI was used directly, and we should track it as expected.

**Return Value:**
- Returns `True` if the API call originated from LiteLLM.
- Returns `False` if OpenAI was directly called without going through LiteLLM.
"""

stack = inspect.stack()

Check warning on line 124 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L124

Added line #L124 was not covered by tests

litellm_seen = False # Track if LiteLLM was encountered in the stack
openai_seen = False # Track if OpenAI was encountered in the stack

Check warning on line 127 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L126-L127

Added lines #L126 - L127 were not covered by tests

for frame in stack:
module = inspect.getmodule(frame.frame)

Check warning on line 130 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L129-L130

Added lines #L129 - L130 were not covered by tests

module_name = module.__name__ if module else None

Check warning on line 132 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L132

Added line #L132 was not covered by tests

filename = frame.filename.lower()

Check warning on line 134 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L134

Added line #L134 was not covered by tests

if module_name and "litellm" in module_name or "litellm" in filename:
litellm_seen = True

Check warning on line 137 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L136-L137

Added lines #L136 - L137 were not covered by tests

if module_name and "openai" in module_name or "openai" in filename:
openai_seen = True

Check warning on line 140 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L139-L140

Added lines #L139 - L140 were not covered by tests

# If OpenAI is seen **before** LiteLLM, it means OpenAI was used directly, so return False
if not litellm_seen:
return False

Check warning on line 144 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L143-L144

Added lines #L143 - L144 were not covered by tests

# If LiteLLM was seen at any point before OpenAI, return True (indicating an indirect OpenAI call via LiteLLM)
return litellm_seen

Check warning on line 147 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L147

Added line #L147 was not covered by tests

def override_api(self):
"""
Overrides key methods of the specified API to record events.
"""

for api in self.SUPPORTED_APIS:
if api in sys.modules:
module = import_module(api)

if api == "litellm":
module_version = version(api)
if module_version is None:
Expand All @@ -116,22 +162,24 @@
if Version(module_version) >= parse("1.3.1"):
provider = LiteLLMProvider(self.client)
provider.override()
self.litellm_initialized = True

Check warning on line 165 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L165

Added line #L165 was not covered by tests
else:
logger.warning(f"Only LiteLLM>=1.3.1 supported. v{module_version} found.")
return # If using an abstraction like litellm, do not patch the underlying LLM APIs

if api == "openai":
# Patch openai v1.0.0+ methods
if hasattr(module, "__version__"):
module_version = parse(module.__version__)
if module_version >= parse("1.0.0"):
provider = OpenAiProvider(self.client)
provider.override()
else:
raise DeprecationWarning(
"OpenAI versions < 0.1 are no longer supported by AgentOps. Please upgrade OpenAI or "
"downgrade AgentOps to <=0.3.8."
)
# Ensure OpenAI is only initialized if it was NOT called inside LiteLLM
if not self._is_litellm_call():
if hasattr(module, "__version__"):
module_version = parse(module.__version__)
if module_version >= parse("1.0.0"):
provider = OpenAiProvider(self.client)
provider.override()

Check warning on line 177 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L172-L177

Added lines #L172 - L177 were not covered by tests
else:
raise DeprecationWarning(

Check warning on line 179 in agentops/llms/tracker.py

View check run for this annotation

Codecov / codecov/patch

agentops/llms/tracker.py#L179

Added line #L179 was not covered by tests
"OpenAI versions < 0.1 are no longer supported by AgentOps. Please upgrade OpenAI or "
"downgrade AgentOps to <=0.3.8."
)

if api == "cohere":
# Patch cohere v5.4.0+ methods
Expand Down
Loading