6 changes: 3 additions & 3 deletions CONTRIBUTING.md
@@ -274,10 +274,10 @@ We use Jupyter notebooks as integration tests for LLM providers. This approach:

The `agentops/llms/` directory contains provider implementations. Each provider must:

-1. **Inherit from InstrumentedProvider**:
+1. **Inherit from BaseProvider**:
    ```python
    @singleton
-   class NewProvider(InstrumentedProvider):
+   class NewProvider(BaseProvider):
        def __init__(self, client):
            super().__init__(client)
            self._provider_name = "ProviderName"
@@ -397,4 +397,4 @@ We encourage active community participation and are here to help!

## License

-By contributing to AgentOps, you agree that your contributions will be licensed under the MIT License.
+By contributing to AgentOps, you agree that your contributions will be licensed under the MIT License.
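To make the CONTRIBUTING.md requirement above concrete, here is a minimal sketch of a new provider written against the renamed base class. Only the `BaseProvider` import path, the `@singleton` decorator, and the constructor shape are confirmed by this diff; the `handle_response`, `override`, and `undo_override` hooks, the `ExampleProvider` name, and the comments are assumptions carried over from the old `InstrumentedProvider` pattern.

```python
# A hedged sketch of a new provider, not part of this diff. The three hook
# methods are assumed from the previous InstrumentedProvider interface.
from typing import Optional

from agentops.llms.providers.base import BaseProvider
from agentops.session import Session
from agentops.singleton import singleton


@singleton
class ExampleProvider(BaseProvider):
    original_create = None        # stores the SDK's unpatched sync entry point
    original_create_async = None  # stores the unpatched async entry point, if any

    def __init__(self, client):
        super().__init__(client)
        self._provider_name = "Example"

    def handle_response(self, response, kwargs, init_timestamp, session: Optional[Session] = None):
        # Convert the raw SDK response into an LLMEvent and record it (assumed hook).
        ...

    def override(self):
        # Monkey-patch the SDK's completion functions to route through handle_response (assumed hook).
        ...

    def undo_override(self):
        # Restore the SDK's original, unpatched functions (assumed hook).
        ...
```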
4 changes: 2 additions & 2 deletions agentops/llms/providers/ai21.py
@@ -2,7 +2,7 @@
import pprint
from typing import Optional

-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider
from agentops.time_travel import fetch_completion_override_from_time_travel_cache

from agentops.event import ErrorEvent, LLMEvent, ActionEvent, ToolEvent
@@ -13,7 +13,7 @@


@singleton
-class AI21Provider(InstrumentedProvider):
+class AI21Provider(BaseProvider):
original_create = None
original_create_async = None
original_answer = None
4 changes: 2 additions & 2 deletions agentops/llms/providers/anthropic.py
@@ -2,7 +2,7 @@
import pprint
from typing import Optional

-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider
from agentops.time_travel import fetch_completion_override_from_time_travel_cache

from agentops.event import ErrorEvent, LLMEvent, ToolEvent
@@ -13,7 +13,7 @@


@singleton
-class AnthropicProvider(InstrumentedProvider):
+class AnthropicProvider(BaseProvider):
original_create = None
original_create_async = None

agentops/llms/providers/instrumented_provider.py → agentops/llms/providers/base.py (renamed)
@@ -5,7 +5,7 @@
from agentops.event import LLMEvent


-class InstrumentedProvider(ABC):
+class BaseProvider(ABC):
_provider_name: str = "InstrumentedModel"
llm_event: Optional[LLMEvent] = None
client = None
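For orientation, here is a hedged sketch of what the renamed `agentops/llms/providers/base.py` plausibly looks like after this change. The class name and the three attributes match the hunk above; the constructor and the abstract hooks are assumptions based on the previous `InstrumentedProvider`.

```python
from abc import ABC, abstractmethod
from typing import Optional

from agentops.event import LLMEvent


class BaseProvider(ABC):
    _provider_name: str = "InstrumentedModel"   # default left unchanged by this diff
    llm_event: Optional[LLMEvent] = None
    client = None

    def __init__(self, client):
        # Concrete providers call super().__init__(client), as CONTRIBUTING.md shows.
        self.client = client

    @abstractmethod
    def handle_response(self, response, kwargs, init_timestamp, session=None) -> dict:
        """Turn a provider response into an LLMEvent and record it (assumed hook)."""
        raise NotImplementedError

    @abstractmethod
    def override(self):
        """Patch the underlying SDK so calls are instrumented (assumed hook)."""
        raise NotImplementedError

    @abstractmethod
    def undo_override(self):
        """Restore the SDK's original behavior (assumed hook)."""
        raise NotImplementedError
```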
4 changes: 2 additions & 2 deletions agentops/llms/providers/cohere.py
@@ -2,7 +2,7 @@
import pprint
from typing import Optional

-from .instrumented_provider import InstrumentedProvider
+from .base import BaseProvider
from agentops.event import ActionEvent, ErrorEvent, LLMEvent
from agentops.session import Session
from agentops.log_config import logger
@@ -11,7 +11,7 @@


@singleton
-class CohereProvider(InstrumentedProvider):
+class CohereProvider(BaseProvider):
original_create = None
original_create_stream = None
original_create_async = None
4 changes: 2 additions & 2 deletions agentops/llms/providers/groq.py
@@ -1,7 +1,7 @@
import pprint
from typing import Optional

-from .instrumented_provider import InstrumentedProvider
+from .base import BaseProvider
from agentops.event import ErrorEvent, LLMEvent
from agentops.session import Session
from agentops.log_config import logger
@@ -10,7 +10,7 @@


@singleton
-class GroqProvider(InstrumentedProvider):
+class GroqProvider(BaseProvider):
original_create = None
original_async_create = None

4 changes: 2 additions & 2 deletions agentops/llms/providers/litellm.py
@@ -5,13 +5,13 @@
from agentops.event import LLMEvent, ErrorEvent
from agentops.session import Session
from agentops.helpers import get_ISO_time, check_call_stack_for_agent_id
-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider
from agentops.time_travel import fetch_completion_override_from_time_travel_cache
from agentops.singleton import singleton


@singleton
-class LiteLLMProvider(InstrumentedProvider):
+class LiteLLMProvider(BaseProvider):
original_create = None
original_create_async = None
original_oai_create = None
4 changes: 2 additions & 2 deletions agentops/llms/providers/llama_stack_client.py
@@ -7,10 +7,10 @@
from agentops.session import Session
from agentops.log_config import logger
from agentops.helpers import get_ISO_time, check_call_stack_for_agent_id
-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider


-class LlamaStackClientProvider(InstrumentedProvider):
+class LlamaStackClientProvider(BaseProvider):
original_complete = None
original_create_turn = None

4 changes: 2 additions & 2 deletions agentops/llms/providers/mistral.py
@@ -7,10 +7,10 @@
from agentops.session import Session
from agentops.log_config import logger
from agentops.helpers import get_ISO_time, check_call_stack_for_agent_id
-from .instrumented_provider import InstrumentedProvider
+from .base import BaseProvider


-class MistralProvider(InstrumentedProvider):
+class MistralProvider(BaseProvider):
original_complete = None
original_complete_async = None
original_stream = None
4 changes: 2 additions & 2 deletions agentops/llms/providers/ollama.py
@@ -5,14 +5,14 @@
from agentops.event import LLMEvent
from agentops.session import Session
from agentops.helpers import get_ISO_time, check_call_stack_for_agent_id
-from .instrumented_provider import InstrumentedProvider
+from .base import BaseProvider
from agentops.singleton import singleton

original_func = {}


@singleton
-class OllamaProvider(InstrumentedProvider):
+class OllamaProvider(BaseProvider):
original_create = None
original_create_async = None

4 changes: 2 additions & 2 deletions agentops/llms/providers/openai.py
@@ -1,7 +1,7 @@
import pprint
from typing import Optional

-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider
from agentops.time_travel import fetch_completion_override_from_time_travel_cache

from agentops.event import ActionEvent, ErrorEvent, LLMEvent
@@ -12,7 +12,7 @@


@singleton
-class OpenAiProvider(InstrumentedProvider):
+class OpenAiProvider(BaseProvider):
original_create = None
original_create_async = None
original_assistant_methods = None
4 changes: 2 additions & 2 deletions agentops/llms/providers/taskweaver.py
@@ -6,12 +6,12 @@
from agentops.session import Session
from agentops.log_config import logger
from agentops.helpers import get_ISO_time, check_call_stack_for_agent_id
-from agentops.llms.providers.instrumented_provider import InstrumentedProvider
+from agentops.llms.providers.base import BaseProvider
from agentops.singleton import singleton


@singleton
-class TaskWeaverProvider(InstrumentedProvider):
+class TaskWeaverProvider(BaseProvider):
original_chat_completion = None

def __init__(self, client):
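For any out-of-tree provider that this diff does not touch, the migration is assumed to be the same one-line change repeated throughout the files above: update the import path and the base class name.

```python
# Before this PR (hypothetical downstream provider, shown for illustration):
# from agentops.llms.providers.instrumented_provider import InstrumentedProvider
# class MyProvider(InstrumentedProvider): ...

# After this PR:
from agentops.llms.providers.base import BaseProvider

class MyProvider(BaseProvider):  # illustrative name, not part of the diff
    ...
```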