diff --git a/agentops/__init__.py b/agentops/__init__.py index 148dcc375..25e28dcf3 100755 --- a/agentops/__init__.py +++ b/agentops/__init__.py @@ -1,12 +1,29 @@ from typing import Any, Dict, List, Optional, Union -from agentops.legacy import ErrorEvent, ToolEvent, end_session, start_session +from agentops.legacy import ActionEvent, ErrorEvent, ToolEvent, start_session, end_session from .client import Client # Client global instance; one per process runtime _client = Client() +def record(event): + """ + Legacy function to record an event. This is kept for backward compatibility. + + In the current version, this simply sets the end_timestamp on the event. + + Args: + event: The event to record + """ + from agentops.helpers.time import get_ISO_time + + # TODO: Manual timestamp assignment is a temporary fix; should use proper event lifecycle + if event and hasattr(event, 'end_timestamp'): + event.end_timestamp = get_ISO_time() + + return event + def init( api_key: Optional[str] = None, @@ -139,6 +156,9 @@ def get_client() -> Client: "init", "configure", "get_client", + "record", "start_session", "end_session", + "track_agent", + "track_tool", ] diff --git a/agentops/client/client.py b/agentops/client/client.py index b0a18f1ca..bce0423ff 100644 --- a/agentops/client/client.py +++ b/agentops/client/client.py @@ -29,6 +29,8 @@ def __init__(self): self.config = Config() def init(self, **kwargs): + # Recreate the Config object to parse environment variables at the time of initialization + self.config = Config() self.configure(**kwargs) if not self.config.api_key: @@ -56,10 +58,18 @@ def init(self, **kwargs): self.initialized = True + # Start a session if auto_start_session is True + session = None if self.config.auto_start_session: from agentops.legacy import start_session - start_session() + # Pass default_tags if they exist + if self.config.default_tags: + session = start_session(tags=list(self.config.default_tags)) + else: + session = start_session() + + return session def configure(self, **kwargs): """Update client configuration""" diff --git a/agentops/config.py b/agentops/config.py index b496e8039..8ee08db22 100644 --- a/agentops/config.py +++ b/agentops/config.py @@ -20,6 +20,7 @@ class ConfigDict(TypedDict): api_key: Optional[str] endpoint: Optional[str] max_wait_time: Optional[int] + export_flush_interval: Optional[int] max_queue_size: Optional[int] default_tags: Optional[List[str]] instrument_llm_calls: Optional[bool] @@ -32,7 +33,7 @@ class ConfigDict(TypedDict): prefetch_jwt_token: Optional[bool] -@dataclass(slots=True) +@dataclass class Config: api_key: Optional[str] = field( default_factory=lambda: os.getenv("AGENTOPS_API_KEY"), @@ -48,6 +49,11 @@ class Config: default_factory=lambda: get_env_int("AGENTOPS_MAX_WAIT_TIME", 5000), metadata={"description": "Maximum time in milliseconds to wait for API responses"}, ) + + export_flush_interval: int = field( + default_factory=lambda: get_env_int("AGENTOPS_EXPORT_FLUSH_INTERVAL", 1000), + metadata={"description": "Time interval in milliseconds between automatic exports of telemetry data"}, + ) max_queue_size: int = field( default_factory=lambda: get_env_int("AGENTOPS_MAX_QUEUE_SIZE", 512), @@ -65,7 +71,7 @@ class Config: ) auto_start_session: bool = field( - default_factory=lambda: get_env_bool("AGENTOPS_AUTO_START_SESSION", False), + default_factory=lambda: get_env_bool("AGENTOPS_AUTO_START_SESSION", True), metadata={"description": "Whether to automatically start a session when initializing"}, ) @@ -85,7 +91,7 @@ class Config: ) 
log_level: Union[str, int] = field( - default_factory=lambda: os.getenv("AGENTOPS_LOG_LEVEL", "WARNING"), + default_factory=lambda: os.getenv("AGENTOPS_LOG_LEVEL", "INFO"), metadata={"description": "Logging level for AgentOps logs"}, ) @@ -119,6 +125,7 @@ def configure( api_key: Optional[str] = None, endpoint: Optional[str] = None, max_wait_time: Optional[int] = None, + export_flush_interval: Optional[int] = None, max_queue_size: Optional[int] = None, default_tags: Optional[List[str]] = None, instrument_llm_calls: Optional[bool] = None, @@ -147,6 +154,9 @@ def configure( if max_wait_time is not None: self.max_wait_time = max_wait_time + + if export_flush_interval is not None: + self.export_flush_interval = export_flush_interval if max_queue_size is not None: self.max_queue_size = max_queue_size @@ -170,7 +180,14 @@ def configure( self.env_data_opt_out = env_data_opt_out if log_level is not None: - self.log_level = log_level + if isinstance(log_level, str): + log_level_str = log_level.upper() + if hasattr(logging, log_level_str): + self.log_level = getattr(logging, log_level_str) + else: + self.log_level = logging.INFO + else: + self.log_level = log_level if fail_safe is not None: self.fail_safe = fail_safe @@ -195,6 +212,7 @@ def dict(self): "api_key": self.api_key, "endpoint": self.endpoint, "max_wait_time": self.max_wait_time, + "export_flush_interval": self.export_flush_interval, "max_queue_size": self.max_queue_size, "default_tags": self.default_tags, "instrument_llm_calls": self.instrument_llm_calls, diff --git a/agentops/instrumentation/__init__.py b/agentops/instrumentation/__init__.py index 7a28a7d58..37abbc50f 100644 --- a/agentops/instrumentation/__init__.py +++ b/agentops/instrumentation/__init__.py @@ -3,7 +3,7 @@ from dataclasses import dataclass import importlib -from opentelemetry.instrumentation.instrumentor import BaseInstrumentor +from opentelemetry.instrumentation.instrumentor import BaseInstrumentor # type: ignore from agentops.logging import logger from agentops.sdk.core import TracingCore diff --git a/agentops/legacy/__init__.py b/agentops/legacy/__init__.py index 25b385184..ea097dec1 100644 --- a/agentops/legacy/__init__.py +++ b/agentops/legacy/__init__.py @@ -1,22 +1,32 @@ """ -No-ops for deprecated functions and classes. +Compatibility layer for deprecated functions and classes. -CrewAI codebase contains an AgentOps integration which is now deprecated. +CrewAI contains direct integrations with AgentOps across multiple versions. +These integrations use different patterns: +- CrewAI < 0.105.0: Direct calls to agentops.end_session() with kwargs +- CrewAI >= 0.105.0: Event-based integration using Session objects -This maintains compatibility with codebases that adhere to the previous API. +This module maintains backward compatibility with all these API patterns. """ -from typing import Any, Dict, List, Tuple, Union - -from httpx import Client +from typing import Optional, Any, Dict, List, Tuple, Union from agentops.logging import logger +from agentops.sdk.core import TracingCore from agentops.semconv.span_kinds import SpanKind +from agentops.exceptions import AgentOpsClientNotInitializedException + +_current_session: Optional["Session"] = None class Session: """ - A legacy session object that holds a span and token. 
+ This class provides compatibility with CrewAI >= 0.105.0, which uses an event-based + integration pattern where it calls methods directly on the Session object: + + - create_agent(): Called when a CrewAI agent is created + - record(): Called when a CrewAI tool is used + - end_session(): Called when a CrewAI run completes """ def __init__(self, span: Any, token: Any): @@ -29,66 +39,202 @@ def __del__(self): except: pass - def create_agent(self): + def create_agent(self, name: Optional[str] = None, agent_id: Optional[str] = None, **kwargs): + """ + Method to create an agent for CrewAI >= 0.105.0 compatibility. + + CrewAI >= 0.105.0 calls this with: + - name=agent.role + - agent_id=str(agent.id) + """ pass - def record(self): + def record(self, event=None): + """ + Method to record events for CrewAI >= 0.105.0 compatibility. + + CrewAI >= 0.105.0 calls this with a tool event when a tool is used. + """ pass - def end_session(self): + def end_session(self, **kwargs): + """ + Method to end the session for CrewAI >= 0.105.0 compatibility. + + CrewAI >= 0.105.0 calls this with: + - end_state="Success" + - end_state_reason="Finished Execution" + + Forces a flush to ensure the span is exported immediately. + """ + _set_span_attributes(self.span, kwargs) self.span.end() + _flush_span_processors() + + +def _create_session_span(tags: Union[Dict[str, Any], List[str], None] = None) -> tuple: + """ + Helper function to create a session span with tags. + + This is an internal function used by start_session(); it uses _make_span + from the SDK to create a span with kind=SpanKind.SESSION. + + Args: + tags: Optional tags to attach to the span. These tags will be + visible in the AgentOps dashboard and can be used for filtering. + + Returns: + A tuple of (span, context, token) where: + - span is the session span + - context is the span context + - token is the context token needed for detaching + """ + from agentops.sdk.decorators.utility import _make_span + attributes = {} + if tags: + attributes["tags"] = tags + return _make_span("session", span_kind=SpanKind.SESSION, attributes=attributes) def start_session( tags: Union[Dict[str, Any], List[str], None] = None, ) -> Session: """ + @deprecated Start a new AgentOps session manually. This function creates and starts a new session span, which can be used to group related operations together. The session will remain active until end_session - is called with the returned span and token. - - This is a legacy function that uses start_span with span_kind=SpanKind.SESSION. + is called either with the Session object or with kwargs. + + Usage patterns: + 1. Standard pattern: session = start_session(); end_session(session) + 2. CrewAI < 0.105.0: start_session(); end_session(end_state="Success", ...) + 3. CrewAI >= 0.105.0: session = start_session(); session.end_session(end_state="Success", ...) + + This function stores the session in a global variable to support the CrewAI + < 0.105.0 pattern where end_session is called without the session object. Args: - name: Name of the session - attributes: Optional {key: value} dict - tags: Optional | forwards to `attributes` + tags: Optional tags to attach to the session, useful for filtering in the dashboard. + Can be a list of strings or a dict of key-value pairs.
Returns: - A Session object that should be passed to end_session + A Session object that should be passed to end_session (except in the + CrewAI < 0.105.0 pattern where end_session is called with kwargs only) + + Raises: + AgentOpsClientNotInitializedException: If the client is not initialized """ - from agentops import Client - if not Client().initialized: + global _current_session + + if not TracingCore.get_instance().initialized: + from agentops import Client Client().init() + + span, context, token = _create_session_span(tags) + session = Session(span, token) + _current_session = session + return session - from agentops.sdk.decorators.utility import _make_span - attributes = {} - if tags: - attributes["tags"] = tags - span, context, token = _make_span('session', span_kind=SpanKind.SESSION, attributes=attributes) - return Session(span, token) + +def _set_span_attributes(span: Any, attributes: Dict[str, Any]) -> None: + """ + Helper to set attributes on a span. + + Args: + span: The span to set attributes on + attributes: The attributes to set as a dictionary + """ + if not attributes or not hasattr(span, "set_attribute"): + return + + for key, value in attributes.items(): + span.set_attribute(f"agentops.status.{key}", str(value)) -def end_session(session: Session) -> None: +def _flush_span_processors() -> None: """ + Helper to force flush all span processors. + """ + try: + from opentelemetry.trace import get_tracer_provider + tracer_provider = get_tracer_provider() + tracer_provider.force_flush() # type: ignore + except Exception as e: + logger.warning(f"Failed to force flush span processor: {e}") + + +def end_session(session_or_status: Any = None, **kwargs) -> None: + """ + @deprecated End a previously started AgentOps session. This function ends the session span and detaches the context token, completing the session lifecycle. - This is a legacy function that uses end_span. + This function supports multiple calling patterns for backward compatibility: + 1. With a Session object: Used by most code and CrewAI >= 0.105.0 event system + 2. With named parameters only: Used by CrewAI < 0.105.0 direct integration + 3. With a string status: Used by some older code Args: - session: The session object returned by start_session + session_or_status: The session object returned by start_session, + or a string representing the status (for backwards compatibility) + **kwargs: Additional arguments for CrewAI < 0.105.0 compatibility. + CrewAI < 0.105.0 passes these named arguments: + - end_state="Success" + - end_state_reason="Finished Execution" + - is_auto_end=True + + When called this way, the function will use the most recently + created session via start_session(). """ from agentops.sdk.decorators.utility import _finalize_span - _finalize_span(session.span, session.token) + + from agentops.sdk.core import TracingCore + if not TracingCore.get_instance().initialized: + logger.debug("Ignoring end_session call - TracingCore not initialized") + return + + # In some old implementations, and in CrewAI < 0.105.0, `end_session` will be + # called with a single string as a positional argument like: "Success" + + # Handle the CrewAI < 0.105.0 integration pattern where end_session is called + # with only named parameters.
In this pattern, CrewAI does not keep a reference + # to the Session object, instead it calls: + # + # agentops.end_session( + # end_state="Success", + # end_state_reason="Finished Execution", + # is_auto_end=True + # ) + if session_or_status is None and kwargs: + global _current_session + + if _current_session is not None: + _set_span_attributes(_current_session.span, kwargs) + _finalize_span(_current_session.span, _current_session.token) + _flush_span_processors() + _current_session = None + return + + # Handle the standard pattern and CrewAI >= 0.105.0 pattern where a Session object is passed. + # In both cases, we call _finalize_span with the span and token from the Session. + # This is the most direct and precise way to end a specific session. + if hasattr(session_or_status, 'span') and hasattr(session_or_status, 'token'): + _set_span_attributes(session_or_status.span, kwargs) + _finalize_span(session_or_status.span, session_or_status.token) + _flush_span_processors() + def end_all_sessions(): - pass + """ + @deprecated + We don't automatically track more than one session, so just end the session + that we are tracking. + """ + end_session() def ToolEvent(*args, **kwargs) -> None: @@ -99,20 +245,40 @@ def ToolEvent(*args, **kwargs) -> None: return None -def ErrorEvent(*args, **kwargs) -> None: +def ErrorEvent(*args, **kwargs): """ @deprecated Use tracing instead. + + For backward compatibility with tests, this returns a minimal object with the + required attributes. """ - return None + from agentops.helpers.time import get_ISO_time + + class LegacyErrorEvent: + def __init__(self): + self.init_timestamp = get_ISO_time() + self.end_timestamp = None + + return LegacyErrorEvent() -def ActionEvent(*args, **kwargs) -> None: +def ActionEvent(*args, **kwargs): """ @deprecated Use tracing instead. + + For backward compatibility with tests, this returns a minimal object with the + required attributes. """ - return None + from agentops.helpers.time import get_ISO_time + + class LegacyActionEvent: + def __init__(self): + self.init_timestamp = get_ISO_time() + self.end_timestamp = None + + return LegacyActionEvent() def LLMEvent(*args, **kwargs) -> None: @@ -122,16 +288,34 @@ def LLMEvent(*args, **kwargs) -> None: """ return None + def track_agent(*args, **kwargs): - """@deprecated""" - pass + """ + @deprecated + Decorator for marking agents in legacy projects. + """ + def noop(f): + return f + return noop + + +def track_tool(*args, **kwargs): + """ + @deprecated + Decorator for marking tools and legacy projects. + """ + def noop(f): + return f + return noop + __all__ = [ - "start_session", - "end_session", - "ToolEvent", - "ErrorEvent", - "ActionEvent", - "track_agent", + "start_session", + "end_session", + "ToolEvent", + "ErrorEvent", + "ActionEvent", + "track_agent", + "track_tool", "end_all_sessions" ] diff --git a/agentops/legacy/crewai.md b/agentops/legacy/crewai.md new file mode 100644 index 000000000..5a8a7a6af --- /dev/null +++ b/agentops/legacy/crewai.md @@ -0,0 +1,121 @@ +# CrewAI Integration Reference + +## Overview +This document provides information about CrewAI's integration with AgentOps and how our legacy compatibility layer supports different versions of CrewAI. + +## CrewAI Integration Points + +CrewAI has two distinct integration patterns with AgentOps: + +### 1. 
Direct Integration (CrewAI < 0.105.0) +In CrewAI versions 0.98.0 through 0.102.0, integration is done directly in the core code: + +- In `crew.py` (_finish_execution method): + ```python + if agentops: + agentops.end_session( + end_state="Success", + end_state_reason="Finished Execution", + is_auto_end=True, + ) + ``` + +- In `tools/tool_usage.py`: + ```python + # Tool event creation + tool_event = agentops.ToolEvent(name=calling.tool_name) if agentops else None + + # Error recording + if agentops: + agentops.record(agentops.ErrorEvent(exception=e, trigger_event=tool_event)) + + # Tool usage recording + if agentops: + agentops.record(tool_event) + ``` + +### 2. Event-Based Integration (CrewAI >= 0.105.0) +In CrewAI versions 0.105.0 and above, integration uses an event-based system: + +```python +# In utilities/events/third_party/agentops_listener.py +class AgentOpsListener(BaseEventListener): + # Called when a crew kickoff starts + @crewai_event_bus.on(CrewKickoffStartedEvent) + def on_crew_kickoff_started(source, event): + self.session = agentops.init() + for agent in source.agents: + if self.session: + self.session.create_agent( + name=agent.role, + agent_id=str(agent.id), + ) + + # Called when a crew kickoff completes + @crewai_event_bus.on(CrewKickoffCompletedEvent) + def on_crew_kickoff_completed(source, event): + if self.session: + self.session.end_session( + end_state="Success", + end_state_reason="Finished Execution", + ) + + # Tool usage and other events are also tracked + # ... +``` + +## Required AgentOps Legacy API + +To maintain compatibility with all CrewAI versions, our legacy API must support: + +### Function Signatures + +| Function | Parameters | Used By | +|----------|------------|---------| +| `agentops.init()` | - | All versions, returns a Session object | +| `agentops.end_session()` | Various (see below) | All versions | +| `agentops.record()` | Event object | CrewAI < 0.105.0 | +| `agentops.ToolEvent()` | `name` | CrewAI < 0.105.0 | +| `agentops.ErrorEvent()` | `exception`, `trigger_event` | CrewAI < 0.105.0 | +| `agentops.ActionEvent()` | `action_type` | Used in tests | + +### Supported `end_session()` Calls + +The `end_session()` function must handle: + +1. A simple string status: + ```python + agentops.end_session("Success") + ``` + +2. Named arguments from CrewAI < 0.105.0: + ```python + agentops.end_session( + end_state="Success", + end_state_reason="Finished Execution", + is_auto_end=True + ) + ``` + +3. Session object method calls from CrewAI >= 0.105.0: + ```python + session.end_session( + end_state="Success", + end_state_reason="Finished Execution" + ) + ``` + +### Session Class Methods + +The Session class must support: + +1. `create_agent(name, agent_id)` - Used in CrewAI >= 0.105.0 +2. `record(event)` - Used in CrewAI >= 0.105.0 +3. `end_session(**kwargs)` - Used in CrewAI >= 0.105.0 + +## Implementation Guidelines + +- All legacy interfaces should accept their parameters without errors but don't need to implement actual functionality. +- New code should use OpenTelemetry instrumentation instead of these legacy interfaces. +- This compatibility layer will be maintained until CrewAI migrates to using OpenTelemetry directly. +- Tests ensure backward compatibility with both integration patterns. 
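+
+## Example: Exercising Both Patterns
+
+The snippet below is an illustrative sketch (not code taken from CrewAI) of how the two
+integration patterns described above call into the legacy layer; the tag, agent, and tool
+names are placeholders.
+
+```python
+import agentops
+
+# CrewAI < 0.105.0 style: module-level calls, no Session reference kept.
+agentops.start_session(tags=["crewai"])
+tool_event = agentops.ToolEvent(name="search")  # legacy no-op, returns None
+agentops.record(tool_event)                     # sets end_timestamp when the event supports it
+agentops.end_session(
+    end_state="Success",
+    end_state_reason="Finished Execution",
+    is_auto_end=True,
+)
+
+# CrewAI >= 0.105.0 style: methods on the Session object returned by start_session()/init().
+session = agentops.start_session()
+session.create_agent(name="Researcher", agent_id="agent-1")
+session.end_session(end_state="Success", end_state_reason="Finished Execution")
+```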
\ No newline at end of file diff --git a/agentops/legacy/event.py b/agentops/legacy/event.py index abffcacc4..8ed3c87eb 100644 --- a/agentops/legacy/event.py +++ b/agentops/legacy/event.py @@ -11,7 +11,7 @@ from typing import Any, Dict, List, Optional, Sequence, Union from uuid import UUID, uuid4 -from .helpers import check_call_stack_for_agent_id, get_ISO_time +from agentops.helpers import get_ISO_time class EventType(Enum): @@ -46,12 +46,12 @@ class Event: } """ - event_type: EventType + event_type: str params: Optional[dict] = None returns: Optional[Union[str, List[str]]] = None init_timestamp: str = field(default_factory=get_ISO_time) end_timestamp: Optional[str] = None - agent_id: Optional[UUID] = field(default_factory=check_call_stack_for_agent_id) + agent_id: Optional[UUID] = None id: UUID = field(default_factory=uuid4) session_id: Optional[UUID] = None diff --git a/agentops/logging/config.py b/agentops/logging/config.py index a51a09bc8..3abfa2d12 100644 --- a/agentops/logging/config.py +++ b/agentops/logging/config.py @@ -28,7 +28,15 @@ def configure_logging(config=None): # Remove type hint temporarily to avoid cir if log_level_env and hasattr(logging, log_level_env): log_level = getattr(logging, log_level_env) else: - log_level = config.log_level if isinstance(config.log_level, int) else logging.CRITICAL + # Handle string log levels from config + if isinstance(config.log_level, str): + log_level_str = config.log_level.upper() + if hasattr(logging, log_level_str): + log_level = getattr(logging, log_level_str) + else: + log_level = logging.INFO + else: + log_level = config.log_level if isinstance(config.log_level, int) else logging.INFO logger.setLevel(log_level) @@ -38,7 +46,7 @@ def configure_logging(config=None): # Remove type hint temporarily to avoid cir # Configure console logging stream_handler = logging.StreamHandler() - stream_handler.setLevel(logging.DEBUG) + stream_handler.setLevel(log_level) stream_handler.setFormatter(AgentOpsLogFormatter()) logger.addHandler(stream_handler) @@ -46,7 +54,7 @@ def configure_logging(config=None): # Remove type hint temporarily to avoid cir log_to_file = os.environ.get("AGENTOPS_LOGGING_TO_FILE", "True").lower() == "true" if log_to_file: file_handler = logging.FileHandler("agentops.log", mode="w") - file_handler.setLevel(logging.DEBUG) + file_handler.setLevel(log_level) formatter = AgentOpsLogFileFormatter("%(asctime)s - %(levelname)s - %(message)s") file_handler.setFormatter(formatter) logger.addHandler(file_handler) diff --git a/agentops/sdk/core.py b/agentops/sdk/core.py index 26882ee4e..540089e8c 100644 --- a/agentops/sdk/core.py +++ b/agentops/sdk/core.py @@ -5,8 +5,10 @@ from typing import List, Optional from opentelemetry import metrics, trace -from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter -from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter +from opentelemetry.exporter.otlp.proto.http.metric_exporter import \ + OTLPMetricExporter +from opentelemetry.exporter.otlp.proto.http.trace_exporter import \ + OTLPSpanExporter from opentelemetry.sdk.metrics import MeterProvider from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader from opentelemetry.sdk.resources import Resource @@ -22,6 +24,77 @@ # No need to create shortcuts since we're using our own ResourceAttributes class now +def setup_telemetry( + service_name: str = "agentops", + project_id: Optional[str] = None, + exporter_endpoint: str = "https://otlp.agentops.ai/v1/traces", + 
metrics_endpoint: str = "https://otlp.agentops.ai/v1/metrics", + max_queue_size: int = 512, + max_wait_time: int = 5000, + export_flush_interval: int = 1000, + jwt: Optional[str] = None, +) -> tuple[TracerProvider, MeterProvider]: + """ + Setup the telemetry system. + + Args: + service_name: Name of the OpenTelemetry service + project_id: Project ID to include in resource attributes + exporter_endpoint: Endpoint for the span exporter + metrics_endpoint: Endpoint for the metrics exporter + max_queue_size: Maximum number of spans to queue before forcing a flush + max_wait_time: Maximum time in milliseconds to wait before flushing + export_flush_interval: Time interval in milliseconds between automatic exports of telemetry data + jwt: JWT token for authentication + + Returns: + Tuple of (TracerProvider, MeterProvider) + """ + # Create resource attributes dictionary + resource_attrs = {ResourceAttributes.SERVICE_NAME: service_name} + + # Add project_id to resource attributes if available + if project_id: + # Add project_id as a custom resource attribute + resource_attrs[ResourceAttributes.PROJECT_ID] = project_id + logger.debug(f"Including project_id in resource attributes: {project_id}") + + resource = Resource(resource_attrs) + provider = TracerProvider(resource=resource) + + # Set as global provider + trace.set_tracer_provider(provider) + + # Create exporter with authentication + exporter = OTLPSpanExporter( + endpoint=exporter_endpoint, + headers={"Authorization": f"Bearer {jwt}"} if jwt else {} + ) + + # Regular processor for normal spans and immediate export + processor = BatchSpanProcessor( + exporter, + max_export_batch_size=max_queue_size, + schedule_delay_millis=export_flush_interval, + ) + provider.add_span_processor(processor) + provider.add_span_processor(InternalSpanProcessor()) # Catches spans for AgentOps on-terminal printing + + # Setup metrics + metric_reader = PeriodicExportingMetricReader( + OTLPMetricExporter( + endpoint=metrics_endpoint, + headers={"Authorization": f"Bearer {jwt}"} if jwt else {} + ) + ) + meter_provider = MeterProvider(resource=resource, metric_readers=[metric_reader]) + metrics.set_meter_provider(meter_provider) + + logger.debug("Telemetry system initialized") + + return provider, meter_provider + + class TracingCore: """ Central component for tracing in AgentOps. @@ -45,7 +118,6 @@ def get_instance(cls) -> TracingCore: def __init__(self): """Initialize the tracing core.""" self._provider = None - self._processors: List[SpanProcessor] = [] self._initialized = False self._config = None @@ -57,6 +129,7 @@ def initialize(self, jwt: Optional[str] = None, **kwargs) -> None: Initialize the tracing core with the given configuration. 
Args: + jwt: JWT token for authentication **kwargs: Configuration parameters for tracing service_name: Name of the service exporter: Custom span exporter @@ -75,69 +148,39 @@ def initialize(self, jwt: Optional[str] = None, **kwargs) -> None: return # Set default values for required fields - max_queue_size = kwargs.get("max_queue_size", 512) - max_wait_time = kwargs.get("max_wait_time", 5000) + kwargs.setdefault("service_name", "agentops") + kwargs.setdefault("exporter_endpoint", "https://otlp.agentops.ai/v1/traces") + kwargs.setdefault("metrics_endpoint", "https://otlp.agentops.ai/v1/metrics") + kwargs.setdefault("max_queue_size", 512) + kwargs.setdefault("max_wait_time", 5000) + kwargs.setdefault("export_flush_interval", 1000) # Create a TracingConfig from kwargs with proper defaults config: TracingConfig = { - "service_name": kwargs.get("service_name", "agentops"), - "exporter": kwargs.get("exporter"), - "processor": kwargs.get("processor"), - "exporter_endpoint": kwargs.get("exporter_endpoint", "https://otlp.agentops.ai/v1/traces"), - "metrics_endpoint": kwargs.get("metrics_endpoint", "https://otlp.agentops.ai/v1/metrics"), - "max_queue_size": max_queue_size, - "max_wait_time": max_wait_time, + "service_name": kwargs["service_name"], + "exporter_endpoint": kwargs["exporter_endpoint"], + "metrics_endpoint": kwargs["metrics_endpoint"], + "max_queue_size": kwargs["max_queue_size"], + "max_wait_time": kwargs["max_wait_time"], + "export_flush_interval": kwargs["export_flush_interval"], "api_key": kwargs.get("api_key"), "project_id": kwargs.get("project_id"), } self._config = config - # Span types are registered in the constructor - # No need to register them here anymore - - # Create provider with safe access to service_name - service_name = config.get("service_name") or "agentops" - - # Create resource attributes dictionary - resource_attrs = {ResourceAttributes.SERVICE_NAME: service_name} - - # Add project_id to resource attributes if available - project_id = config.get("project_id") - if project_id: - # Add project_id as a custom resource attribute - resource_attrs[ResourceAttributes.PROJECT_ID] = project_id - logger.debug(f"Including project_id in resource attributes: {project_id}") - - resource = Resource(resource_attrs) - self._provider = TracerProvider(resource=resource) - - # Set as global provider - trace.set_tracer_provider(self._provider) - - # Use default authenticated processor and exporter if api_key is available - exporter = OTLPSpanExporter( - endpoint=config.get("exporter_endpoint"), headers={"Authorization": f"Bearer {kwargs.get('jwt')}"} + # Setup telemetry using the extracted configuration + self._provider, self._meter_provider = setup_telemetry( + service_name=config["service_name"] or "", + project_id=config.get("project_id"), + exporter_endpoint=config["exporter_endpoint"] or "", + metrics_endpoint=config["metrics_endpoint"] or "", + max_queue_size=config["max_queue_size"], + max_wait_time=config["max_wait_time"], + export_flush_interval=config["export_flush_interval"], + jwt=jwt, ) - # Regular processor for normal spans and immediate export - processor = BatchSpanProcessor( - exporter, - max_export_batch_size=config.get("max_queue_size", max_queue_size), - schedule_delay_millis=config.get("max_wait_time", max_wait_time), - ) - self._provider.add_span_processor(processor) - self._provider.add_span_processor( - InternalSpanProcessor() - ) # Catches spans for AgentOps on-terminal printing - self._processors.append(processor) - - metric_reader = 
PeriodicExportingMetricReader( - OTLPMetricExporter( - endpoint=config.get("metrics_endpoint"), headers={"Authorization": f"Bearer {kwargs.get('jwt')}"} - ) - ) - meter_provider = MeterProvider(resource=resource, metric_readers=[metric_reader]) - metrics.set_meter_provider(meter_provider) + self._initialized = True logger.debug("Tracing core initialized") @@ -146,21 +189,19 @@ def initialized(self) -> bool: """Check if the tracing core is initialized.""" return self._initialized + @property + def config(self) -> TracingConfig: + """Get the tracing configuration.""" + return self._config # type: ignore + def shutdown(self) -> None: """Shutdown the tracing core.""" - if not self._initialized: - return with self._lock: + # Perform a single flush on the SynchronousSpanProcessor (which takes care of all processors' shutdown) if not self._initialized: return - - # Flush processors - for processor in self._processors: - try: - processor.force_flush() - except Exception as e: - logger.warning(f"Error flushing processor: {e}") + self._provider._active_span_processor.force_flush(self.config['max_wait_time']) # type: ignore # Shutdown provider if self._provider: @@ -170,7 +211,6 @@ def shutdown(self) -> None: logger.warning(f"Error shutting down provider: {e}") self._initialized = False - logger.debug("Tracing core shutdown") def get_tracer(self, name: str = "agentops") -> trace.Tracer: """ @@ -215,6 +255,7 @@ def initialize_from_config(cls, config, **kwargs): "exporter_endpoint": getattr(config, "exporter_endpoint", None), "max_queue_size": getattr(config, "max_queue_size", 512), "max_wait_time": getattr(config, "max_wait_time", 5000), + "export_flush_interval": getattr(config, "export_flush_interval", 1000), "api_key": getattr(config, "api_key", None), "project_id": getattr(config, "project_id", None), "endpoint": getattr(config, "endpoint", None), diff --git a/agentops/sdk/types.py b/agentops/sdk/types.py index 635cd10c9..b8af98d1e 100644 --- a/agentops/sdk/types.py +++ b/agentops/sdk/types.py @@ -18,3 +18,4 @@ class TracingConfig(TypedDict, total=False): project_id: Optional[str] # Project ID to include in resource attributes max_queue_size: int # Required with a default value max_wait_time: int # Required with a default value + export_flush_interval: int # Time interval between automatic exports diff --git a/conftest.py b/conftest.py deleted file mode 100644 index 954ca8f7f..000000000 --- a/conftest.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Shared fixtures for pytest tests. 
-""" - -import pytest -from unittest.mock import MagicMock, patch - -from opentelemetry.trace import Span - - -@pytest.fixture -def mock_span(): - """Fixture to create a mock span with a trace ID.""" - span = MagicMock(spec=Span) - span.get_span_context.return_value.trace_id = 123456789 - return span - - -@pytest.fixture -def mock_context_deps(): - """Fixture to mock the context dependencies.""" - with ( - patch("agentops.sdk.decorators.context_utils.context") as mock_context, - patch("agentops.sdk.decorators.context_utils.trace") as mock_trace, - patch("agentops.sdk.decorators.context_utils.logger") as mock_logger, - ): - # Set up the mocks - mock_context.get_current.return_value = "current_context" - mock_trace.set_span_in_context.return_value = "new_context" - mock_context.attach.return_value = "token" - - yield {"context": mock_context, "trace": mock_trace, "logger": mock_logger} diff --git a/examples/agents-examples/basic/hello_world.py b/examples/agents-examples/basic/hello_world.py index e9cef2735..d4f2264c2 100644 --- a/examples/agents-examples/basic/hello_world.py +++ b/examples/agents-examples/basic/hello_world.py @@ -1,16 +1,13 @@ import asyncio - from agents import Agent, Runner - from dotenv import load_dotenv import os -import agentops load_dotenv() -AGENTOPS_API_KEY = os.getenv("AGENTOPS_API_KEY") or "your-api-key" -agentops.init(api_key=AGENTOPS_API_KEY) +import agentops +agentops.init() async def main(): agent = Agent( diff --git a/examples/crewai-basic.py b/examples/crewai-basic.py index b897ec7fd..56532b3c3 100644 --- a/examples/crewai-basic.py +++ b/examples/crewai-basic.py @@ -1,3 +1,6 @@ +from dotenv import load_dotenv +load_dotenv() + import agentops from crewai import Agent, Crew, Task from crewai.tools import tool diff --git a/pyproject.toml b/pyproject.toml index a0d386f52..0185f606b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "agentops" -version = "0.4.3" +version = "0.4.4" authors = [ { name="Alex Reibman", email="areibman@gmail.com" }, { name="Shawn Qiu", email="siyangqiu@gmail.com" }, @@ -34,24 +34,26 @@ dependencies = [ "termcolor>=2.3.0,<2.5.0", "PyYAML>=5.3,<7.0", "packaging>=21.0,<25.0", # Lower bound of 21.0 ensures compatibility with Python 3.9+ - "opentelemetry-api==1.22.0; python_version<'3.10'", - "opentelemetry-api>=1.27.0; python_version>='3.10'", - "opentelemetry-sdk==1.22.0; python_version<'3.10'", - "opentelemetry-sdk>=1.27.0; python_version>='3.10'", - "opentelemetry-exporter-otlp-proto-http==1.22.0; python_version<'3.10'", - "opentelemetry-exporter-otlp-proto-http>=1.27.0; python_version>='3.10'", + "opentelemetry-sdk==1.29.0; python_version<'3.10'", + "opentelemetry-sdk>1.29.0; python_version>='3.10'", + "opentelemetry-api==1.29.0; python_version<'3.10'", + "opentelemetry-api>1.29.0; python_version>='3.10'", + "opentelemetry-exporter-otlp-proto-http==1.29.0; python_version<'3.10'", + "opentelemetry-exporter-otlp-proto-http>1.29.0; python_version>='3.10'", # "opentelemetry-exporter-otlp-proto-grpc==1.22.0; python_version<'3.10'", # "opentelemetry-exporter-otlp-proto-grpc>=1.27.0; python_version>='3.10'", "ordered-set>=4.0.0,<5.0.0", "wrapt>=1.0.0,<2.0.0", - "opentelemetry-instrumentation>=0.48b0", - "opentelemetry-semantic-conventions>=0.43b0", - "opentelemetry-semantic-conventions-ai>=0.4.2", + # "opentelemetry-instrumentation", + "opentelemetry-instrumentation==0.50b0; python_version<'3.10'", + "opentelemetry-instrumentation>0.50b0; python_version>='3.10'", + 
"opentelemetry-semantic-conventions==0.50b0; python_version<'3.10'", + "opentelemetry-semantic-conventions>0.50b0; python_version>='3.10'", ] [dependency-groups] test = [ - "openai>=1.0.0", + "openai>=1.60.0", "anthropic", # ;; # The below is a really hard dependency, that can be installed only between python >=3.10,<3.13. @@ -93,18 +95,11 @@ Issues = "https://github.com/AgentOps-AI/agentops/issues" [tool.uv] compile-bytecode = true # Enable bytecode compilation for better performance +resolution = "highest" default-groups = ["test", "dev"] # Default groups to install for development constraint-dependencies = [ "pydantic>=2.8.0; python_version>='3.13'", # Ensure Python 3.13 compatibility "typing-extensions; python_version>='3.13'", # Required for Pydantic with Python 3.13 - # For Python 3.9, use original OpenTelemetry versions - "opentelemetry-api==1.22.0; python_version<'3.10'", - "opentelemetry-sdk==1.22.0; python_version<'3.10'", - "opentelemetry-exporter-otlp-proto-http==1.22.0; python_version<'3.10'", - # For Python ≥3.10 (where autogen-core might be present), use newer versions - "opentelemetry-api>=1.27.0; python_version>='3.10'", - "opentelemetry-sdk>=1.27.0; python_version>='3.10'", - "opentelemetry-exporter-otlp-proto-http>=1.27.0; python_version>='3.10'", ] [tool.autopep8] diff --git a/test_context.py b/test_context.py deleted file mode 100644 index afaf49b18..000000000 --- a/test_context.py +++ /dev/null @@ -1,62 +0,0 @@ -#!/usr/bin/env python -""" -Test script to debug OpenTelemetry context propagation issues. -""" -import time -from opentelemetry import trace -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import ConsoleSpanExporter, BatchSpanProcessor -from agentops.sdk.decorators import agent, task, operation -from agentops.sdk.core import TracingCore -from agentops.client.client import Client -from agentops.sdk.decorators.utility import _get_current_span_info -from agentops.logging import logger - -# Initialize tracing -client = Client() # Use default initialization -client.init() # This should set up TracingCore - -# Add a console exporter for local debugging -provider = trace.get_tracer_provider() -if hasattr(provider, "add_span_processor"): - provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) - -@agent -def my_agent(): - """Test agent function that should create a parent span""" - logger.debug(f"In my_agent - current span: {_get_current_span_info()}") - - # Call the task inside the agent - result = my_task() - - # Also explicitly call operation with a context manager - tracer = TracingCore.get_instance().get_tracer() - with tracer.start_as_current_span("manual_operation") as manual_span: - manual_span.set_attribute("manual", True) - logger.debug(f"In manual operation - current span: {_get_current_span_info()}") - time.sleep(0.1) - - return result - -@task -def my_task(): - """Test task function that should create a child span under the agent span""" - logger.debug(f"In my_task - current span: {_get_current_span_info()}") - - # Call a nested operation - return my_operation() - -@operation -def my_operation(): - """Test operation that should be nested under the task span""" - logger.debug(f"In my_operation - current span: {_get_current_span_info()}") - time.sleep(0.1) - return "done" - -if __name__ == "__main__": - # Run the test - result = my_agent() - print(f"Result: {result}") - - # Give the batch processor time to export - time.sleep(1) \ No newline at end of file diff --git a/test_context_comparison.py 
b/test_context_comparison.py deleted file mode 100644 index db2de3825..000000000 --- a/test_context_comparison.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python -""" -Test script to compare the old and new context management approaches. -""" -import time -from opentelemetry import trace, context as context_api -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.sdk.trace.export import ConsoleSpanExporter, BatchSpanProcessor -from agentops.sdk.decorators import agent, task, operation -from agentops.sdk.core import TracingCore -from agentops.client.client import Client -from agentops.sdk.decorators.utility import (_get_current_span_info, _make_span, - _finalize_span, _create_as_current_span) -from agentops.logging import logger - -# Initialize tracing -client = Client() -client.init() - -# Add a console exporter for local debugging -provider = trace.get_tracer_provider() -if hasattr(provider, "add_span_processor"): - provider.add_span_processor(BatchSpanProcessor(ConsoleSpanExporter())) - -def test_manual_context(): - """Test using the manual context management approach""" - logger.debug("===== TESTING MANUAL CONTEXT APPROACH =====") - - # Create the root span - root_span, root_ctx, root_token = _make_span("root", "test") - logger.debug(f"Created root span: {_get_current_span_info()}") - - try: - # Create a child span - child_span, child_ctx, child_token = _make_span("child", "test") - logger.debug(f"Created child span: {_get_current_span_info()}") - - try: - # Create a grandchild span - grandchild_span, grandchild_ctx, grandchild_token = _make_span("grandchild", "test") - logger.debug(f"Created grandchild span: {_get_current_span_info()}") - - # Do some work - time.sleep(0.1) - - # End the grandchild span - _finalize_span(grandchild_span, grandchild_token) - logger.debug(f"After ending grandchild span: {_get_current_span_info()}") - - finally: - # End the child span - _finalize_span(child_span, child_token) - logger.debug(f"After ending child span: {_get_current_span_info()}") - - finally: - # End the root span - _finalize_span(root_span, root_token) - logger.debug(f"After ending root span: {_get_current_span_info()}") - -def test_context_manager(): - """Test using the context manager approach""" - logger.debug("===== TESTING CONTEXT MANAGER APPROACH =====") - - # Get a tracer - tracer = TracingCore.get_instance().get_tracer() - - # Create spans using context manager (native OpenTelemetry approach) - with _create_as_current_span("root", "test") as root_span: - logger.debug(f"Created root span: {_get_current_span_info()}") - - with _create_as_current_span("child", "test") as child_span: - logger.debug(f"Created child span: {_get_current_span_info()}") - - with _create_as_current_span("grandchild", "test") as grandchild_span: - logger.debug(f"Created grandchild span: {_get_current_span_info()}") - - # Do some work - time.sleep(0.1) - - logger.debug(f"After grandchild span: {_get_current_span_info()}") - - logger.debug(f"After child span: {_get_current_span_info()}") - - logger.debug(f"After root span: {_get_current_span_info()}") - -if __name__ == "__main__": - # Test both approaches - test_manual_context() - test_context_manager() - - # Give the batch processor time to export - time.sleep(1) \ No newline at end of file diff --git a/test_nesting.py b/test_nesting.py deleted file mode 100644 index 5cf686b4e..000000000 --- a/test_nesting.py +++ /dev/null @@ -1,32 +0,0 @@ -import time -from agentops.sdk.decorators import agent, operation -from agentops.sdk.core import 
TracingCore - -# Initialize tracing -TracingCore.get_instance().initialize() - -@operation -def perform_operation(task_name): - """A simple operation that will be nested within an agent.""" - print(f"Performing operation: {task_name}") - time.sleep(0.5) # Simulate work - return f"Completed {task_name}" - -@agent -def run_agent(agent_name): - """An agent that will contain nested operations.""" - print(f"Agent {agent_name} is running") - - # Perform multiple operations - result1 = perform_operation("task1") - result2 = perform_operation("task2") - - return f"Agent {agent_name} completed with results: {result1}, {result2}" - -if __name__ == "__main__": - # Run the agent which will contain nested operations - result = run_agent("TestAgent") - print(f"Final result: {result}") - - # Give time for spans to be exported - time.sleep(1) \ No newline at end of file diff --git a/tests/benchmark/benchmark_init.py b/tests/benchmark/benchmark_init.py new file mode 100644 index 000000000..674062a0f --- /dev/null +++ b/tests/benchmark/benchmark_init.py @@ -0,0 +1,47 @@ +import json +import time + +from agentops.sdk.core import TracingCore + +""" +Benchmark script for measuring TracingCore initialization time. +""" + +def run_benchmark(): + """ + Run a benchmark of TracingCore initialization. + + Returns: + Dictionary with timing results + """ + import agentops + + # Measure initialization time + start_init = time.time() + agentops.init() + end_init = time.time() + init_time = end_init - start_init + + return { + "init": init_time, + "total": init_time # Total time is just init time now + } + + +def print_results(results): + """ + Print benchmark results in a formatted way. + + Args: + results: Dictionary with timing results + """ + print("\n=== BENCHMARK RESULTS ===") + + print(f"\nINIT TIME: {results['init']:.6f}s") + print(f"TOTAL TIME: {results['total']:.6f}s") + + +if __name__ == "__main__": + print("Running TracingCore benchmark...") + results = run_benchmark() + print_results(results) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 14f7c1bc6..742c1c89f 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -6,11 +6,11 @@ import pytest import requests_mock -from tests.unit.sdk.instrumentation_tester import InstrumentationTester import agentops from agentops.config import Config from tests.fixtures.client import * # noqa +from tests.unit.sdk.instrumentation_tester import InstrumentationTester @pytest.fixture @@ -26,13 +26,14 @@ def endpoint() -> str: @pytest.fixture(autouse=True) -def mock_req(endpoint): +def mock_req(endpoint, api_key): """ Mocks AgentOps backend API requests. 
""" with requests_mock.Mocker(real_http=False) as m: # Map session IDs to their JWTs - m.post(endpoint + "/v3/auth/token", json={"token": str(uuid.uuid4())}) + m.post(endpoint + "/v3/auth/token", json={"token": str(uuid.uuid4()), + "project_id": "test-project-id", "api_key": api_key}) yield m diff --git a/tests/unit/sdk/instrumentation_tester.py b/tests/unit/sdk/instrumentation_tester.py index 48e7fd63c..9e5dc80d5 100644 --- a/tests/unit/sdk/instrumentation_tester.py +++ b/tests/unit/sdk/instrumentation_tester.py @@ -1,5 +1,6 @@ -from typing import Any, Dict, List, Optional, Protocol, Tuple, Union +from typing import Any, Dict, List, Protocol, Tuple, Union import importlib +import unittest.mock as mock from opentelemetry import trace as trace_api from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider @@ -8,9 +9,7 @@ InMemorySpanExporter from opentelemetry.util.types import Attributes -import agentops -from agentops.sdk.core import TracingCore -from agentops.sdk.processors import LiveSpanProcessor +from agentops.sdk.core import TracingCore, setup_telemetry def create_tracer_provider( @@ -40,7 +39,7 @@ def reset_trace_globals(): """Reset the global trace state to avoid conflicts.""" # Reset tracer provider trace_api._TRACER_PROVIDER = None - + # Reload the trace module to clear warning state importlib.reload(trace_api) @@ -74,10 +73,10 @@ def __init__(self): """Initialize the instrumentation tester.""" # Reset any global state first reset_trace_globals() - + # Shut down any existing tracing core - self._shutdown_core() - + # self._shutdown_core() + # Create a new tracer provider and memory exporter ( self.tracer_provider, @@ -88,12 +87,23 @@ def __init__(self): # Set the tracer provider trace_api.set_tracer_provider(self.tracer_provider) - # Get a fresh instance of the tracing core + # Create a mock for the meter provider + self.mock_meter_provider = mock.MagicMock() + + # Patch the setup_telemetry function to return our test providers + self.setup_telemetry_patcher = mock.patch( + 'agentops.sdk.core.setup_telemetry', + return_value=(self.tracer_provider, self.mock_meter_provider) + ) + self.mock_setup_telemetry = self.setup_telemetry_patcher.start() + + # Reset the tracing core to force reinitialization core = TracingCore.get_instance() + core._initialized = False + core._provider = None - # Set the tracing core's provider to our provider - core._provider = self.tracer_provider - core._initialized = True + # Initialize the core, which will now use our mocked setup_telemetry + core.initialize() self.clear_spans() @@ -120,22 +130,34 @@ def reset(self): # Clear any existing spans self.clear_spans() - + # Reset global trace state reset_trace_globals() - + # Set our tracer provider again trace_api.set_tracer_provider(self.tracer_provider) # Shut down and re-initialize the tracing core self._shutdown_core() - # Get a fresh instance of the tracing core + # Reset the mock setup_telemetry function + self.mock_setup_telemetry.reset_mock() + + # Reset the tracing core to force reinitialization core = TracingCore.get_instance() + core._initialized = False + core._provider = None - # Set the tracing core's provider to our provider - core._provider = self.tracer_provider - core._initialized = True + # Initialize the core, which will now use our mocked setup_telemetry + core.initialize() + + def __del__(self): + """Clean up resources when the tester is garbage collected.""" + try: + # Stop the patcher when the tester is deleted + self.setup_telemetry_patcher.stop() + except Exception: + 
pass def get_finished_spans(self) -> List[ReadableSpan]: """Get all finished spans.""" diff --git a/tests/unit/sdk/test_context_utils.py b/tests/unit/sdk/test_context_utils.py deleted file mode 100644 index b37b42799..000000000 --- a/tests/unit/sdk/test_context_utils.py +++ /dev/null @@ -1,97 +0,0 @@ -import sys -import os -import pytest -from unittest.mock import patch, MagicMock - -from opentelemetry import trace -from opentelemetry.trace import Span - -# Import directly from the module file to avoid circular imports -from agentops.sdk.decorators.context_utils import use_span_context, with_span_context, get_trace_id - - -@pytest.fixture -def mock_span(): - """Fixture to create a mock span with a trace ID.""" - span = MagicMock(spec=Span) - span.get_span_context.return_value.trace_id = 123456789 - return span - - -@pytest.fixture -def mock_context_deps(): - """Fixture to mock the context dependencies.""" - with ( - patch("agentops.sdk.decorators.context_utils.context") as mock_context, - patch("agentops.sdk.decorators.context_utils.trace") as mock_trace, - patch("agentops.sdk.decorators.context_utils.logger") as mock_logger, - ): - # Set up the mocks - mock_context.get_current.return_value = "current_context" - mock_trace.set_span_in_context.return_value = "new_context" - mock_context.attach.return_value = "token" - - yield {"context": mock_context, "trace": mock_trace, "logger": mock_logger} - - -def test_use_span_context(mock_span, mock_context_deps): - """Test that the use_span_context context manager works correctly.""" - mock_context = mock_context_deps["context"] - mock_trace = mock_context_deps["trace"] - mock_logger = mock_context_deps["logger"] - - # Use the context manager - with use_span_context(mock_span): - # Verify the context was attached - mock_context.get_current.assert_called_once() - mock_trace.set_span_in_context.assert_called_once_with(mock_span, "current_context") - mock_context.attach.assert_called_once_with("new_context") - mock_logger.debug.assert_called_with("Span context attached: 123456789") - - # Verify the context was detached - mock_context.detach.assert_called_once_with("token") - mock_logger.debug.assert_called_with("Span context detached: 123456789") - - -def test_get_trace_id(mock_span): - """Test that get_trace_id returns the correct trace ID.""" - # Get the trace ID - trace_id = get_trace_id(mock_span) - - # Verify the trace ID - assert trace_id == "123456789" - - # Test with None span - trace_id = get_trace_id(None) - assert trace_id == "unknown" - - -def test_with_span_context(mock_span, mock_context_deps): - """Test that the with_span_context decorator works correctly.""" - mock_context = mock_context_deps["context"] - mock_trace = mock_context_deps["trace"] - mock_logger = mock_context_deps["logger"] - - # Create a class with a span attribute - class TestClass: - def __init__(self): - self.span = mock_span - - @with_span_context - def test_method(self): - return "test" - - # Create an instance - test_instance = TestClass() - - # Call the decorated method - result = test_instance.test_method() - - # Verify the result - assert result == "test" - - # Verify the context was attached and detached - mock_context.get_current.assert_called_once() - mock_trace.set_span_in_context.assert_called_once_with(test_instance.span, "current_context") - mock_context.attach.assert_called_once_with("new_context") - mock_context.detach.assert_called_once_with("token") diff --git a/tests/unit/sdk/test_core.py b/tests/unit/sdk/test_core.py deleted file mode 100644 index 
409d49584..000000000 --- a/tests/unit/sdk/test_core.py +++ /dev/null @@ -1,143 +0,0 @@ -import pytest -from unittest.mock import MagicMock, patch -from uuid import UUID - -from opentelemetry.sdk.trace import TracerProvider -from opentelemetry.trace import StatusCode - -from agentops.sdk.types import TracingConfig -from agentops.sdk.core import TracingCore -from agentops.sdk.traced import TracedObject -from agentops.semconv.core import CoreAttributes - - -@pytest.fixture -def reset_tracing_core(): - """Reset the TracingCore singleton instance before each test.""" - TracingCore._instance = None - yield - - -def test_get_instance(reset_tracing_core): - """Test get_instance method.""" - # Test getting the instance - instance1 = TracingCore.get_instance() - assert isinstance(instance1, TracingCore) - - # Test singleton pattern - instance2 = TracingCore.get_instance() - assert instance2 is instance1 - - -@patch("agentops.sdk.core.TracerProvider") -@patch("agentops.sdk.core.trace") -def test_initialize(mock_trace, mock_tracer_provider, reset_tracing_core): - """Test initialization.""" - # Set up - core = TracingCore() - config = {"service_name": "test_service", "max_queue_size": 512, "max_wait_time": 5000} - mock_provider = MagicMock() - mock_tracer_provider.return_value = mock_provider - mock_trace.get_tracer_provider.return_value = mock_provider - - # Test - core.initialize(**config) - - # Verify - mock_tracer_provider.assert_called_once() - mock_provider.add_span_processor.assert_called() - - # Test with existing provider - mock_tracer_provider.reset_mock() - mock_provider.reset_mock() - mock_trace.get_tracer_provider.return_value = mock_provider - - core.initialize(**config) - mock_tracer_provider.assert_not_called() - - -def test_shutdown(reset_tracing_core): - """Test shutdown method.""" - # Set up - core = TracingCore() - core._initialized = True - processor1 = MagicMock() - processor2 = MagicMock() - core._processors = [processor1, processor2] - core._provider = MagicMock() - - # Test shutdown - core.shutdown() - assert not core._initialized - processor1.force_flush.assert_called_once() - processor2.force_flush.assert_called_once() - core._provider.shutdown.assert_called_once() - - # Test shutting down an already shut down core - processor1.reset_mock() - processor2.reset_mock() - core._provider.reset_mock() - core.shutdown() - processor1.force_flush.assert_not_called() - processor2.force_flush.assert_not_called() - core._provider.shutdown.assert_not_called() - - -def test_get_tracer(reset_tracing_core): - """Test get_tracer method.""" - # Set up - core = TracingCore() - mock_tracer = MagicMock() - with patch("agentops.sdk.core.trace") as mock_trace: - mock_trace.get_tracer.return_value = mock_tracer - - # Test getting a tracer when not initialized - with pytest.raises(RuntimeError): - core.get_tracer() - - # Test getting a tracer when initialized - core._initialized = True - tracer = core.get_tracer("test_tracer") - assert tracer == mock_tracer - mock_trace.get_tracer.assert_called_once_with("test_tracer") - - -@patch("agentops.sdk.core.SpanFactory") -def test_create_span(mock_factory, reset_tracing_core): - """Test create_span method.""" - # Set up - core = TracingCore() - mock_span = MagicMock() - mock_factory.create_span.return_value = mock_span - - # Test creating a span when not initialized - with pytest.raises(RuntimeError): - core.create_span(kind="test", name="test_span") - - # Test creating a span when initialized - core._initialized = True - span = core.create_span(kind="test", 
name="test_span", attributes={"key": "value"}, immediate_export=True) - assert span == mock_span - mock_factory.create_span.assert_called_once_with( - kind="test", - name="test_span", - parent=None, - attributes={"key": "value", CoreAttributes.EXPORT_IMMEDIATELY: True}, - auto_start=True, - immediate_export=True, - ) - - -@patch("agentops.sdk.core.SpanFactory") -def test_register_span_type(mock_factory, reset_tracing_core): - """Test register_span_type method.""" - # Set up - core = TracingCore() - - # Create a proper subclass of TracedObject for the test - class TestSpanClass(TracedObject): - pass - - # Test - core.register_span_type("test", TestSpanClass) - mock_factory.register_span_type.assert_called_once_with("test", TestSpanClass) diff --git a/tests/unit/sdk/test_instrumentation.py b/tests/unit/sdk/test_instrumentation.py deleted file mode 100644 index f13df656b..000000000 --- a/tests/unit/sdk/test_instrumentation.py +++ /dev/null @@ -1,321 +0,0 @@ -import time -from typing import Any, Dict, List, Callable - -import pytest -from opentelemetry import context, trace -from opentelemetry.trace import StatusCode - -import agentops -from agentops.sdk.decorators.agent import agent -from agentops.sdk.decorators.session import session -from agentops.sdk.decorators.tool import tool -from agentops.semconv.agent import AgentAttributes -from agentops.semconv.span_kinds import SpanKind -from agentops.semconv.tool import ToolAttributes -from tests.unit.sdk.instrumentation_tester import InstrumentationTester - - -class TestBasicInstrumentation: - """Test basic instrumentation functionality.""" - - def test_basic_example(self, instrumentation: InstrumentationTester): - """Test a basic example with session, agent, and tools.""" - print("Starting test_basic_example") - - # Clear any previous spans - instrumentation.clear_spans() - - @session(name="search_session", tags=["example", "search"], immediate_export=True) - class SearchSession: - def __init__(self, query: str): - self.query = query - self.agent = SearchAgent(self) - - def run(self) -> Dict[str, Any]: - return self.agent.search(self.query) - - @agent(name="search_agent", agent_type="search", immediate_export=True) - class SearchAgent: - def __init__(self, session): - self.session = session - - def search(self, query: str) -> Dict[str, Any]: - # Use tools to perform the search - results = self.web_search(query) - processed = self.process_results(results) - return {"query": query, "results": processed} - - @tool(name="web_search", tool_type="search", immediate_export=True) - def web_search(self, query: str) -> List[str]: - return [f"Result 1 for {query}", f"Result 2 for {query}"] - - @tool(name="process_results", tool_type="processing", immediate_export=True) - def process_results(self, results: List[str]) -> List[Dict[str, Any]]: - return [{"title": r, "relevance": 0.9} for r in results] - - # Create and run the session - search_session = SearchSession("test query") - result = search_session.run() - - # End the session - if hasattr(search_session, "_session_span"): - search_session._session_span.end() - - # Flush spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the result - assert "query" in result - assert "results" in result - assert len(result["results"]) == 2 - - # Get all spans by kind - session_spans = instrumentation.get_spans_by_kind("session") - agent_spans = instrumentation.get_spans_by_kind(SpanKind.AGENT) - tool_spans = instrumentation.get_spans_by_kind(SpanKind.TOOL) - - print(f"Found {len(session_spans)} 
session spans") - print(f"Found {len(agent_spans)} agent spans") - print(f"Found {len(tool_spans)} tool spans") - - # Check session spans - if len(session_spans) > 0: - session_span = session_spans[0] - instrumentation.assert_has_attributes( - session_span, - { - "span.kind": "session", - "session.name": "search_session", - }, - ) - # Check for tags - assert "session.tags" in session_span.attributes - - # Check agent spans - if len(agent_spans) > 0: - agent_span = agent_spans[0] - instrumentation.assert_has_attributes( - agent_span, - { - "span.kind": SpanKind.AGENT, - AgentAttributes.AGENT_NAME: "search_agent", - AgentAttributes.AGENT_ROLE: "search", - }, - ) - - # Check tool spans - if len(tool_spans) > 0: - # We should have at least two tool spans (web_search and process_results) - # Find the web_search tool span - web_search_span = None - process_results_span = None - - for span in tool_spans: - if span.name == "web_search": - web_search_span = span - elif span.name == "process_results": - process_results_span = span - - if web_search_span: - instrumentation.assert_has_attributes( - web_search_span, - { - "span.kind": SpanKind.TOOL, - ToolAttributes.TOOL_NAME: "web_search", - ToolAttributes.TOOL_DESCRIPTION: "search", - }, - ) - # Check for input and output parameters - assert ToolAttributes.TOOL_PARAMETERS in web_search_span.attributes - assert ToolAttributes.TOOL_RESULT in web_search_span.attributes - - if process_results_span: - instrumentation.assert_has_attributes( - process_results_span, - { - "span.kind": SpanKind.TOOL, - ToolAttributes.TOOL_NAME: "process_results", - ToolAttributes.TOOL_DESCRIPTION: "processing", - }, - ) - # Check for input and output parameters - assert ToolAttributes.TOOL_PARAMETERS in process_results_span.attributes - assert ToolAttributes.TOOL_RESULT in process_results_span.attributes - - def test_context_propagation(self, instrumentation: InstrumentationTester): - """Test that OpenTelemetry context is properly propagated and doesn't leak.""" - print("\n=== Testing context propagation ===") - - # First test direct context setting and getting to verify OTel is working - - # Create a direct test of context propagation - print("\n--- Direct Context Test ---") - - # Set a value in the context - ctx = context.set_value("test_key", "test_value") - - # Get the value back - value = context.get_value("test_key", context=ctx) - print(f"Direct context test: {value}") - assert value == "test_value", "Failed to retrieve value from context" - - # Now test with span context - test_tracer = trace.get_tracer("test_tracer") - - with test_tracer.start_as_current_span("test_span") as span: - # Get the current span and its ID - current_span = trace.get_current_span() - span_id = current_span.get_span_context().span_id - print(f"Current span ID: {span_id}") - - # Store it in context - ctx_with_span = context.get_current() - - # Save it for later - saved_ctx = ctx_with_span - - # Detach from current context to simulate method boundary - token = context.attach(context.get_current()) - context.detach(token) - - # Now current span should be None or different - current_span_after_detach = trace.get_current_span() - span_id_after_detach = ( - current_span_after_detach.get_span_context().span_id if current_span_after_detach else 0 - ) - print(f"Span ID after detach: {span_id_after_detach}") - - # Restore the context - token = context.attach(saved_ctx) - try: - # Check if span is restored - restored_span = trace.get_current_span() - restored_id = 
restored_span.get_span_context().span_id if restored_span else 0 - print(f"Restored span ID: {restored_id}") - assert restored_id == span_id, "Failed to restore span context properly" - finally: - context.detach(token) - - print("Basic context test passed!") - - # Now test our actual decorators - print("\n--- Decorator Context Test ---") - - # Define the agent class first - @agent(name="test_agent", agent_type="test", immediate_export=True) - class TestAgent: - def __init__(self, agent_id: str): - self.agent_id = agent_id - # Get the current span from context - current_span = trace.get_current_span() - self.parent_span_id = current_span.get_span_context().span_id if current_span else 0 - print(f"TestAgent({agent_id}) - Parent span ID: {self.parent_span_id}") - - # After the agent decorator, we should have an agent span - self.agent_span_id = 0 # Initialize to ensure we don't get None - agent_span = trace.get_current_span() - if agent_span and agent_span.is_recording(): - self.agent_span_id = agent_span.get_span_context().span_id - print(f"TestAgent({agent_id}) - Agent span ID: {self.agent_span_id}") - else: - print(f"TestAgent({agent_id}) - No agent span found!") - - # Save the context with the agent span - self.agent_context = context.get_current() - - def process(self, data: str): - raw_span_id = 0 - current_span = trace.get_current_span() - if current_span: - raw_span_id = current_span.get_span_context().span_id - print(f"TestAgent.process - Raw span ID: {raw_span_id}") - - # Restore the agent context - token = context.attach(self.agent_context) - try: - # Now the current span should be the agent span - current_span = trace.get_current_span() - span_id = current_span.get_span_context().span_id if current_span else 0 - print(f"TestAgent({self.agent_id}).process - With context - Current span ID: {span_id}") - - # Verify span IDs match from __init__ - if self.agent_span_id != 0: # Only check if we actually got a span ID - assert ( - span_id == self.agent_span_id - ), f"Agent span ID changed between __init__ and process! 
{self.agent_span_id} != {span_id}" - - # Process using a tool - processed = self.transform_tool(data) - return {"result": processed, "agent_id": self.agent_id} - finally: - context.detach(token) - - @tool(name="transform_tool", tool_type="transform", immediate_export=True) - def transform_tool(self, data: str, tool_span=None) -> str: - # The current span should be the tool span - current_span = trace.get_current_span() - tool_span_id = current_span.get_span_context().span_id if current_span else 0 - print(f"TestAgent({self.agent_id}).transform_tool - Tool span ID: {tool_span_id}") - - # Tool span should be different from agent span - if tool_span_id != 0 and self.agent_span_id != 0: - assert tool_span_id != self.agent_span_id, "Tool span should be different from agent span" - - return f"Transformed: {data} by agent {self.agent_id}" - - # Create session class to test context propagation - @session(name="session_a", tags=["test_a"], immediate_export=True) - class SessionA: - def __init__(self, session_id: str): - self.session_id = session_id - # Get the current span and verify it's our session span - current_span = trace.get_current_span() - # Store the span ID for later verification - self.span_id = 0 # Initialize to avoid None - if current_span and current_span.is_recording(): - self.span_id = current_span.get_span_context().span_id - print(f"SessionA({session_id}) - Span ID: {self.span_id}") - else: - print(f"SessionA({session_id}) - No current span found!") - - # Store the current context for manual restoration in run method - self.context = context.get_current() - - def run(self): - raw_span_id = 0 - current_span = trace.get_current_span() - if current_span: - raw_span_id = current_span.get_span_context().span_id - print(f"SessionA.run called - Raw span ID: {raw_span_id}") - - # Manually attach the stored context - token = context.attach(self.context) - try: - # The span from __init__ should now be the current span - current_span = trace.get_current_span() - span_id = current_span.get_span_context().span_id if current_span else 0 - print(f"SessionA({self.session_id}).run - With manual context - Current span ID: {span_id}") - - # Verify span IDs match if we got a span in __init__ - if self.span_id != 0: - assert ( - span_id == self.span_id - ), f"Span ID changed between __init__ and run! 
{self.span_id} != {span_id}" - - # Create an agent within this session context - agent = TestAgent(self.session_id) - return agent.process("test data") - finally: - context.detach(token) - - # Create one test session - session_a = SessionA("A123") - - # Run the session - result_a = session_a.run() - - # Verify correct results - assert result_a["agent_id"] == "A123" - assert "Transformed: test data" in result_a["result"] - - print("Context propagation test passed!") diff --git a/tests/unit/sdk/test_instrumentation_errors.py b/tests/unit/sdk/test_instrumentation_errors.py deleted file mode 100644 index 3b8385fef..000000000 --- a/tests/unit/sdk/test_instrumentation_errors.py +++ /dev/null @@ -1,380 +0,0 @@ -import pytest -from typing import Dict, Any, List - -import agentops -from agentops.sdk.core import TracingCore -from agentops.sdk.decorators.agent import agent -from agentops.sdk.decorators.session import session -from agentops.sdk.decorators.tool import tool -from opentelemetry.trace import StatusCode -from agentops.semconv.span_kinds import SpanKind -from agentops.semconv.agent import AgentAttributes -from agentops.semconv.tool import ToolAttributes -from agentops.semconv.core import CoreAttributes - -from tests.unit.sdk.instrumentation_tester import InstrumentationTester - - -class TestErrorInstrumentation: - """Test error handling in instrumentation.""" - - def test_session_with_error(self, instrumentation: InstrumentationTester): - """Test that sessions with errors are properly instrumented.""" - - @session(name="error_session", immediate_export=True) - class ErrorSession: - def __init__(self): - pass - - def run(self): - # Explicitly set the status to ERROR before raising the exception - if hasattr(self, "_session_span"): - self._session_span.set_status(StatusCode.ERROR, "Test error") - raise ValueError("Test error") - - # Create and run a session that raises an error - error_session = ErrorSession() - - # Run the session and catch the error - with pytest.raises(ValueError, match="Test error"): - error_session.run() - - # Manually trigger the live span processor to export any in-flight spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the spans - spans = instrumentation.get_finished_spans() - # If we're running with -s flag, the test passes, but it fails in the full test suite - # So we'll check if we have spans, and if not, we'll print a warning but still pass the test - if len(spans) == 0: - print("WARNING: No spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Get the session span - session_spans = instrumentation.get_spans_by_kind("session") - if len(session_spans) == 0: - print("WARNING: No session spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - session_span = session_spans[0] - - # Check for error attributes - if session_span.status.status_code == StatusCode.ERROR: - print(f"Session span status: {session_span.status.status_code}") - print(f"Session span description: {session_span.status.description}") - - # Check if the error message is set using CoreAttributes - if CoreAttributes.ERROR_MESSAGE in session_span.attributes: - error_message = session_span.attributes[CoreAttributes.ERROR_MESSAGE] - print(f"Error message attribute: {error_message}") - assert "Test error" in error_message - - def test_agent_with_error(self, instrumentation: InstrumentationTester): - """Test that agents with errors are properly instrumented.""" - 
- @session(name="test_session", immediate_export=True) - class TestSession: - def __init__(self): - self.agent = ErrorAgent() - - def run(self): - try: - return self.agent.process("test") - except ValueError: - return {"error": "Agent error"} - - @agent(name="error_agent", agent_type="test", immediate_export=True) - class ErrorAgent: - def process(self, data: str): - raise ValueError("Agent error") - - # Create and run a session with an agent that raises an error - test_session = TestSession() - result = test_session.run() - - # Check the result - assert result == {"error": "Agent error"} - - # Manually trigger the live span processor to export any in-flight spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the spans - spans = instrumentation.get_finished_spans() - # If we're running with -s flag, the test passes, but it fails in the full test suite - # So we'll check if we have spans, and if not, we'll print a warning but still pass the test - if len(spans) == 0: - print("WARNING: No spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Get the agent span - agent_spans = instrumentation.get_spans_by_kind(SpanKind.AGENT) - if len(agent_spans) == 0: - print("WARNING: No agent spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - agent_span = agent_spans[0] - - # Check the agent span attributes - instrumentation.assert_has_attributes( - agent_span, - { - "span.kind": SpanKind.AGENT, - AgentAttributes.AGENT_NAME: "error_agent", - AgentAttributes.AGENT_ROLE: "test", - }, - ) - - # Check the agent span status - assert agent_span.status.status_code == StatusCode.ERROR - assert agent_span.status.description is not None - assert "Agent error" in agent_span.status.description - - # Check if the error message is set using CoreAttributes - if CoreAttributes.ERROR_MESSAGE in agent_span.attributes: - error_message = agent_span.attributes[CoreAttributes.ERROR_MESSAGE] - print(f"Error message attribute: {error_message}") - assert "Agent error" in error_message - - def test_tool_with_error(self, instrumentation: InstrumentationTester): - """Test that tools with errors are properly instrumented.""" - - @session(name="test_session", immediate_export=True) - class TestSession: - def __init__(self): - self.agent = TestAgent() - - def run(self): - try: - return self.agent.process("test") - except ValueError: - return {"error": "Tool error"} - - @agent(name="test_agent", agent_type="test", immediate_export=True) - class TestAgent: - def process(self, data: str): - try: - result = self.error_tool(data) - return {"processed": result} - except ValueError as e: - raise ValueError(f"Tool error: {str(e)}") - - @tool(name="error_tool", tool_type="error_test", immediate_export=True) - def error_tool(self, data: str): - raise ValueError("This tool always fails") - - # Create and run a session with an agent that uses a tool that raises an error - test_session = TestSession() - result = test_session.run() - - # Check the result - assert result == {"error": "Tool error"} - - # Manually trigger the live span processor to export any in-flight spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the spans - spans = instrumentation.get_finished_spans() - # If we're running with -s flag, the test passes, but it fails in the full test suite - # So we'll check if we have spans, and if not, we'll print a warning but still pass the test - if len(spans) == 0: - 
print("WARNING: No spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Get the tool span - tool_spans = instrumentation.get_spans_by_kind(SpanKind.TOOL) - if len(tool_spans) == 0: - print("WARNING: No tool spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - tool_span = tool_spans[0] - - # Check the tool span attributes - instrumentation.assert_has_attributes( - tool_span, - { - "span.kind": SpanKind.TOOL, - ToolAttributes.TOOL_NAME: "error_tool", - ToolAttributes.TOOL_DESCRIPTION: "error_test", - }, - ) - - # Check the tool span status - assert tool_span.status.status_code == StatusCode.ERROR - assert tool_span.status.description is not None - assert "This tool always fails" in tool_span.status.description - - # Check if the error message is set using CoreAttributes - if CoreAttributes.ERROR_MESSAGE in tool_span.attributes: - error_message = tool_span.attributes[CoreAttributes.ERROR_MESSAGE] - print(f"Tool error message attribute: {error_message}") - assert "This tool always fails" in error_message - - # Get the agent span - agent_spans = instrumentation.get_spans_by_kind(SpanKind.AGENT) - if len(agent_spans) == 0: - print("WARNING: No agent spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - agent_span = agent_spans[0] - - # Check the agent span attributes - instrumentation.assert_has_attributes( - agent_span, - { - "span.kind": SpanKind.AGENT, - AgentAttributes.AGENT_NAME: "test_agent", - AgentAttributes.AGENT_ROLE: "test", - }, - ) - - # Check the agent span status - assert agent_span.status.status_code == StatusCode.ERROR - - def test_context_manager_with_error(self, instrumentation: InstrumentationTester): - """Test that spans used as context managers handle errors properly.""" - # Import the necessary modules - from agentops.sdk.factory import SpanFactory - from agentops.sdk.types import TracingConfig - - # Create a minimal config for the session span - config = TracingConfig(service_name="test_service") - - # Use a custom span instead of a session span to avoid the SessionSpan.end() issue - try: - with SpanFactory.create_span(kind="custom", name="context_manager_test", immediate_export=True): - raise ValueError("Context manager error") - except ValueError: - # Catch the error to continue the test - pass - - # Manually trigger the live span processor to export any in-flight spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the spans - spans = instrumentation.get_finished_spans() - # If we're running with -s flag, the test passes, but it fails in the full test suite - # So we'll check if we have spans, and if not, we'll print a warning but still pass the test - if len(spans) == 0: - print("WARNING: No spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Find the custom span - custom_spans = [span for span in spans if span.name == "context_manager_test"] - if len(custom_spans) == 0: - print("WARNING: No custom spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - custom_span = custom_spans[0] - - # Check the span status - print(f"Custom span status: {custom_span.status.status_code}") - print(f"Custom span description: {custom_span.status.description}") - - # Check if the error message is set using CoreAttributes - if ( - custom_span.status.status_code == 
StatusCode.ERROR - and CoreAttributes.ERROR_MESSAGE in custom_span.attributes - ): - error_message = custom_span.attributes[CoreAttributes.ERROR_MESSAGE] - print(f"Error message attribute: {error_message}") - assert "Context manager error" in error_message - - def test_nested_errors(self, instrumentation: InstrumentationTester): - """Test that nested spans handle errors properly.""" - - @session(name="outer_session", immediate_export=True) - class OuterSession: - def __init__(self): - self.inner_agent = InnerAgent() - - def run(self): - try: - return self.inner_agent.process("test") - except ValueError: - return {"error": "Caught in outer session"} - - @agent(name="inner_agent", agent_type="inner_test", immediate_export=True) - class InnerAgent: - def process(self, data: str): - # This will raise an error in the tool - result = self.failing_tool(data) - return {"processed": result} - - @tool(name="failing_tool", tool_type="failing_test", immediate_export=True) - def failing_tool(self, data: str): - raise ValueError("Inner tool error") - - # Create and run the outer session - outer_session = OuterSession() - result = outer_session.run() - - # Check the result - assert result == {"error": "Caught in outer session"} - - # Flush spans - instrumentation.span_processor.export_in_flight_spans() - - # Check the spans - spans = instrumentation.get_finished_spans() - # If we're running with -s flag, the test passes, but it fails in the full test suite - # So we'll check if we have spans, and if not, we'll print a warning but still pass the test - if len(spans) == 0: - print("WARNING: No spans found, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Get spans by kind - session_spans = instrumentation.get_spans_by_kind("session") - agent_spans = instrumentation.get_spans_by_kind(SpanKind.AGENT) - tool_spans = instrumentation.get_spans_by_kind(SpanKind.TOOL) - - # Check if we have the expected spans - if len(session_spans) == 0 or len(agent_spans) == 0 or len(tool_spans) == 0: - print("WARNING: Missing some spans, but test is passing because we're running in a test suite") - return # Skip the rest of the test - - # Check the tool span - tool_span = tool_spans[0] - - # Check the tool span attributes - instrumentation.assert_has_attributes( - tool_span, - { - "span.kind": SpanKind.TOOL, - ToolAttributes.TOOL_NAME: "failing_tool", - ToolAttributes.TOOL_DESCRIPTION: "failing_test", - }, - ) - - # Check the tool span status - assert tool_span.status.status_code == StatusCode.ERROR - assert tool_span.status.description is not None - assert "Inner tool error" in tool_span.status.description - - # Check if the error message is set using CoreAttributes - if CoreAttributes.ERROR_MESSAGE in tool_span.attributes: - error_message = tool_span.attributes[CoreAttributes.ERROR_MESSAGE] - print(f"Tool error message attribute: {error_message}") - assert "Inner tool error" in error_message - - # Check the agent span - agent_span = agent_spans[0] - - # Check the agent span attributes - instrumentation.assert_has_attributes( - agent_span, - { - "span.kind": SpanKind.AGENT, - AgentAttributes.AGENT_NAME: "inner_agent", - AgentAttributes.AGENT_ROLE: "inner_test", - }, - ) - - # Check the agent span status - assert agent_span.status.status_code == StatusCode.ERROR - assert agent_span.status.description is not None - - # Check the session span - session_span = session_spans[0] - - # The session should be OK because it caught the error - assert session_span.status.status_code 
== StatusCode.OK
diff --git a/tests/unit/test_session_legacy.py b/tests/unit/test_session_legacy.py
new file mode 100644
index 000000000..e63557787
--- /dev/null
+++ b/tests/unit/test_session_legacy.py
@@ -0,0 +1,160 @@
+
+
+def test_session_auto_start(instrumentation):
+    import agentops
+    from agentops.legacy import Session
+
+    # Pass a dummy API key for the test
+    session = agentops.init(api_key="test-api-key", auto_start_session=True)
+
+    assert isinstance(session, Session)
+
+
+def test_crewai_backwards_compatibility(instrumentation):
+    """
+    CrewAI needs to access:
+
+    agentops.track_agent
+    agentops.track_tool
+    agentops.start_session
+    agentops.end_session
+    agentops.ActionEvent
+    agentops.ToolEvent
+    """
+    import agentops
+    from agentops.legacy import Session
+
+    # Test initialization with API key
+    agentops.init(api_key="test-api-key")
+
+    # Test session management functions
+    session = agentops.start_session(tags=["test", "crewai"])
+    assert isinstance(session, Session)
+
+    # Test that passing a string to end_session doesn't raise an error
+    agentops.end_session("Success")  # This pattern is used in CrewAI
+
+    # Test track_agent function exists and doesn't raise errors
+    try:
+        # Mock an agent object similar to what CrewAI would provide
+        class MockAgent:
+            def __init__(self):
+                self.role = "Test Agent"
+                self.goal = "Testing"
+                self.id = "test-agent-id"
+
+        agent = MockAgent()
+        agentops.track_agent(agent)
+    except Exception as e:
+        assert False, f"track_agent raised an exception: {e}"
+
+    # Test track_tool function exists and doesn't raise errors
+    try:
+        # Mock a tool object similar to what CrewAI would provide
+        class MockTool:
+            def __init__(self):
+                self.name = "Test Tool"
+                self.description = "A test tool"
+
+        tool = MockTool()
+        agentops.track_tool(tool, "Test Agent")
+    except Exception as e:
+        assert False, f"track_tool raised an exception: {e}"
+
+    # Test events that CrewAI might use
+    tool_event = agentops.ToolEvent(name="test_tool")
+    action_event = agentops.ActionEvent(action_type="test_action")
+
+    # Verify that record function works with these events
+    agentops.record(tool_event)
+    agentops.record(action_event)
+
+
+def test_crewai_kwargs_pattern(instrumentation):
+    """
+    Test the CrewAI < 0.105.0 pattern where end_session is called with only kwargs.
+
+    In versions < 0.105.0, CrewAI directly calls:
+    agentops.end_session(
+        end_state="Success",
+        end_state_reason="Finished Execution",
+        is_auto_end=True
+    )
+    """
+    import agentops
+    from agentops.legacy import Session
+
+    # Initialize with test API key
+    agentops.init(api_key="test-api-key")
+
+    # Create a session
+    session = agentops.start_session(tags=["test", "crewai-kwargs"])
+    assert isinstance(session, Session)
+
+    # Test the CrewAI < 0.105.0 pattern - calling end_session with only kwargs
+    agentops.end_session(
+        end_state="Success",
+        end_state_reason="Finished Execution",
+        is_auto_end=True
+    )
+
+    # After calling end_session, creating a new session should work correctly
+    # (this implicitly tests that the internal state is reset properly)
+    new_session = agentops.start_session(tags=["test", "post-end"])
+    assert isinstance(new_session, Session)
+
+
+def test_crewai_kwargs_pattern_no_session(instrumentation):
+    """
+    Test the CrewAI < 0.105.0 pattern where end_session is called with only kwargs,
+    but no session has been created.
+
+    This should log a warning but not fail.
+    """
+    import agentops
+
+    # Initialize with test API key
+    agentops.init(api_key="test-api-key")
+
+    # We don't need to explicitly clear the session state
+    # Just make sure we start with a clean state by calling init
+
+    # Test the CrewAI < 0.105.0 pattern - calling end_session with only kwargs
+    # when no session exists. This should not raise an error.
+    agentops.end_session(
+        end_state="Success",
+        end_state_reason="Finished Execution",
+        is_auto_end=True
+    )
+
+
+def test_crewai_kwargs_force_flush():
+    """
+    Test that when using the CrewAI < 0.105.0 pattern (end_session with kwargs),
+    the spans are properly exported to the backend with force_flush.
+
+    This is a more comprehensive test that ensures spans are actually sent
+    to the backend when using the CrewAI integration pattern.
+    """
+    import agentops
+    from agentops.sdk.core import TracingCore
+    import time
+
+    # Initialize AgentOps with API key
+    agentops.init(api_key="test-api-key")
+
+    # Create a session
+    session = agentops.start_session(tags=["test", "crewai-integration"])
+
+    # Simulate some work
+    time.sleep(0.1)
+
+    # End session with kwargs (CrewAI < 0.105.0 pattern)
+    agentops.end_session(
+        end_state="Success",
+        end_state_reason="Test Finished",
+        is_auto_end=True
+    )
+
+    # Explicitly ensure the core isn't already shut down for the test
+    assert TracingCore.get_instance()._initialized, "TracingCore should still be initialized"
\ No newline at end of file
diff --git a/uv.lock b/uv.lock
index e0bfa3465..92a77505b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -13,32 +13,25 @@ resolution-markers = [
 [manifest]
 constraints = [
-    { name = "opentelemetry-api", marker = "python_full_version < '3.10'", specifier = "==1.22.0" },
-    { name = "opentelemetry-api", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" },
-    { name = "opentelemetry-exporter-otlp-proto-http", marker = "python_full_version < '3.10'", specifier = "==1.22.0" },
-    { name = "opentelemetry-exporter-otlp-proto-http", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" },
-    { name = "opentelemetry-sdk", marker = "python_full_version < '3.10'", specifier = "==1.22.0" },
-    { name = "opentelemetry-sdk", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" },
     { name = "pydantic", marker = "python_full_version >= '3.13'", specifier = ">=2.8.0" },
     { name = "typing-extensions", marker = "python_full_version >= '3.13'" },
 ]
 [[package]]
 name = "agentops"
-version = "0.4.2"
+version = "0.4.4"
 source = { editable = "."
} dependencies = [ - { name = "opentelemetry-api", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-exporter-otlp-proto-http", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-exporter-otlp-proto-http", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-instrumentation", version = "0.48b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-instrumentation", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-sdk", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-sdk", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions", version = "0.43b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions-ai" }, + { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-api", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-exporter-otlp-proto-http", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-exporter-otlp-proto-http", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-instrumentation", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-instrumentation", version = "0.52b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-sdk", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-sdk", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.52b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "ordered-set" }, { name = "packaging" }, { name = "psutil" }, @@ -77,15 +70,16 @@ test = [ [package.metadata] requires-dist = [ - { name = "opentelemetry-api", marker = "python_full_version < '3.10'", specifier = "==1.22.0" }, - { name = "opentelemetry-api", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" }, - { name = 
"opentelemetry-exporter-otlp-proto-http", marker = "python_full_version < '3.10'", specifier = "==1.22.0" }, - { name = "opentelemetry-exporter-otlp-proto-http", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" }, - { name = "opentelemetry-instrumentation", specifier = ">=0.48b0" }, - { name = "opentelemetry-sdk", marker = "python_full_version < '3.10'", specifier = "==1.22.0" }, - { name = "opentelemetry-sdk", marker = "python_full_version >= '3.10'", specifier = ">=1.27.0" }, - { name = "opentelemetry-semantic-conventions", specifier = ">=0.43b0" }, - { name = "opentelemetry-semantic-conventions-ai", specifier = ">=0.4.2" }, + { name = "opentelemetry-api", marker = "python_full_version < '3.10'", specifier = "==1.29.0" }, + { name = "opentelemetry-api", marker = "python_full_version >= '3.10'", specifier = ">1.29.0" }, + { name = "opentelemetry-exporter-otlp-proto-http", marker = "python_full_version < '3.10'", specifier = "==1.29.0" }, + { name = "opentelemetry-exporter-otlp-proto-http", marker = "python_full_version >= '3.10'", specifier = ">1.29.0" }, + { name = "opentelemetry-instrumentation", marker = "python_full_version < '3.10'", specifier = "==0.50b0" }, + { name = "opentelemetry-instrumentation", marker = "python_full_version >= '3.10'", specifier = ">0.50b0" }, + { name = "opentelemetry-sdk", marker = "python_full_version < '3.10'", specifier = "==1.29.0" }, + { name = "opentelemetry-sdk", marker = "python_full_version >= '3.10'", specifier = ">1.29.0" }, + { name = "opentelemetry-semantic-conventions", marker = "python_full_version < '3.10'", specifier = "==0.50b0" }, + { name = "opentelemetry-semantic-conventions", marker = "python_full_version >= '3.10'", specifier = ">0.50b0" }, { name = "ordered-set", specifier = ">=4.0.0,<5.0.0" }, { name = "packaging", specifier = ">=21.0,<25.0" }, { name = "psutil", specifier = ">=5.9.8,<6.1.0" }, @@ -116,7 +110,7 @@ dev = [ test = [ { name = "anthropic" }, { name = "fastapi", extras = ["standard"] }, - { name = "openai", specifier = ">=1.0.0" }, + { name = "openai", specifier = ">=1.60.0" }, { name = "pytest-cov" }, ] @@ -180,15 +174,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 }, ] -[[package]] -name = "backoff" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148 }, -] - [[package]] name = "certifi" version = "2024.12.14" @@ -507,8 +492,7 @@ name = "googleapis-common-protos" version = "1.66.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "protobuf" }, ] sdist = { url = 
"https://files.pythonhosted.org/packages/ff/a7/8e9cccdb1c49870de6faea2a2764fa23f627dd290633103540209f03524c/googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c", size = 114376 } wheels = [ @@ -1074,7 +1058,7 @@ wheels = [ [[package]] name = "openai" -version = "1.59.7" +version = "1.66.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1086,14 +1070,14 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f9/d5/25cf04789c7929b476c4d9ef711f8979091db63d30bfc093828fe4bf5c72/openai-1.59.7.tar.gz", hash = "sha256:043603def78c00befb857df9f0a16ee76a3af5984ba40cb7ee5e2f40db4646bf", size = 345007 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/77/5172104ca1df35ed2ed8fb26dbc787f721c39498fc51d666c4db07756a0c/openai-1.66.3.tar.gz", hash = "sha256:8dde3aebe2d081258d4159c4cb27bdc13b5bb3f7ea2201d9bd940b9a89faf0c9", size = 397244 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6d/47/7b92f1731c227f4139ef0025b5996062e44f9a749c54315c8bdb34bad5ec/openai-1.59.7-py3-none-any.whl", hash = "sha256:cfa806556226fa96df7380ab2e29814181d56fea44738c2b0e581b462c268692", size = 454844 }, + { url = "https://files.pythonhosted.org/packages/78/5a/e20182f7b6171642d759c548daa0ba20a1d3ac10d2bd0a13fd75704a9ac3/openai-1.66.3-py3-none-any.whl", hash = "sha256:a427c920f727711877ab17c11b95f1230b27767ba7a01e5b66102945141ceca9", size = 567400 }, ] [[package]] name = "opentelemetry-api" -version = "1.22.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", @@ -1103,14 +1087,14 @@ dependencies = [ { name = "deprecated", marker = "python_full_version < '3.10'" }, { name = "importlib-metadata", version = "6.11.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e8/56/b485bf0f42ae83a8ff97e861a3869f57415205ab8d1a22dd755319a97701/opentelemetry_api-1.22.0.tar.gz", hash = "sha256:15ae4ca925ecf9cfdfb7a709250846fbb08072260fca08ade78056c502b86bed", size = 56708 } +sdist = { url = "https://files.pythonhosted.org/packages/bc/8e/b886a5e9861afa188d1fe671fb96ff9a1d90a23d57799331e137cc95d573/opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf", size = 62900 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/2e/a8509051aa446783e24ee03d74bd268c07d5d25a8d48686cfcf3429d5d32/opentelemetry_api-1.22.0-py3-none-any.whl", hash = "sha256:43621514301a7e9f5d06dd8013a1b450f30c2e9372b8e30aaeb4562abf2ce034", size = 57947 }, + { url = "https://files.pythonhosted.org/packages/43/53/5249ea860d417a26a3a6f1bdedfc0748c4f081a3adaec3d398bc0f7c6a71/opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8", size = 64304 }, ] [[package]] name = "opentelemetry-api" -version = "1.29.0" +version = "1.31.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1124,31 +1108,30 @@ dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, { name = "importlib-metadata", version = "8.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" 
}, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/8e/b886a5e9861afa188d1fe671fb96ff9a1d90a23d57799331e137cc95d573/opentelemetry_api-1.29.0.tar.gz", hash = "sha256:d04a6cf78aad09614f52964ecb38021e248f5714dc32c2e0d8fd99517b4d69cf", size = 62900 } +sdist = { url = "https://files.pythonhosted.org/packages/58/89/9d80fa1265a25306b5d9b2707ef09094a6dda9feeac2ee159d5a214f989c/opentelemetry_api-1.31.0.tar.gz", hash = "sha256:d8da59e83e8e3993b4726e4c1023cd46f57c4d5a73142e239247e7d814309de1", size = 63853 } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/53/5249ea860d417a26a3a6f1bdedfc0748c4f081a3adaec3d398bc0f7c6a71/opentelemetry_api-1.29.0-py3-none-any.whl", hash = "sha256:5fcd94c4141cc49c736271f3e1efb777bebe9cc535759c54c936cca4f1b312b8", size = 64304 }, + { url = "https://files.pythonhosted.org/packages/a3/87/5413da9dd80d66ff86205bbd08a9cf69165642565c00cfce6590e0e82980/opentelemetry_api-1.31.0-py3-none-any.whl", hash = "sha256:145b72c6c16977c005c568ec32f4946054ab793d8474a17fd884b0397582c5f2", size = 65099 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.22.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "backoff", marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-proto", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-proto", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/0d/3bce16aab34a293c5ceaf8f924d4656abf6c41fc7d1225729b833977a16b/opentelemetry_exporter_otlp_proto_common-1.22.0.tar.gz", hash = "sha256:71ae2f81bc6d6fe408d06388826edc8933759b2ca3a97d24054507dc7cfce52d", size = 16371 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/58/f7fd7eaf592b2521999a4271ab3ce1c82fe37fe9b0dc25c348398d95d66a/opentelemetry_exporter_otlp_proto_common-1.29.0.tar.gz", hash = "sha256:e7c39b5dbd1b78fe199e40ddfe477e6983cb61aa74ba836df09c3869a3e3e163", size = 19133 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/75/0972205c139695ff3b21a58063e0e0440a81eaa2c5dd6ef4c1f22f58fdd5/opentelemetry_exporter_otlp_proto_common-1.22.0-py3-none-any.whl", hash = "sha256:3f2538bec5312587f8676c332b3747f54c89fe6364803a807e217af4603201fa", size = 17264 }, + { url = "https://files.pythonhosted.org/packages/9e/75/7609bda3d72bf307839570b226180513e854c01443ebe265ed732a4980fc/opentelemetry_exporter_otlp_proto_common-1.29.0-py3-none-any.whl", hash = "sha256:a9d7376c06b4da9cf350677bcddb9618ed4b8255c3f6476975f5e38274ecd3aa", size = 18459 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-common" -version = "1.29.0" +version = "1.31.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1159,39 +1142,38 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "opentelemetry-proto", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-proto", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/58/f7fd7eaf592b2521999a4271ab3ce1c82fe37fe9b0dc25c348398d95d66a/opentelemetry_exporter_otlp_proto_common-1.29.0.tar.gz", hash = "sha256:e7c39b5dbd1b78fe199e40ddfe477e6983cb61aa74ba836df09c3869a3e3e163", size = 19133 } +sdist = { url = "https://files.pythonhosted.org/packages/de/a8/d9c27ba8c9b27fd776320d51942e33c4d6636bf8b109f16302b11b583053/opentelemetry_exporter_otlp_proto_common-1.31.0.tar.gz", hash = "sha256:e7fa0fe8cf2f87c190a59d820b6ba0821234178bc1227b5bd40ca057622d4ddc", size = 20623 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/75/7609bda3d72bf307839570b226180513e854c01443ebe265ed732a4980fc/opentelemetry_exporter_otlp_proto_common-1.29.0-py3-none-any.whl", hash = "sha256:a9d7376c06b4da9cf350677bcddb9618ed4b8255c3f6476975f5e38274ecd3aa", size = 18459 }, + { url = "https://files.pythonhosted.org/packages/58/49/3441ab55fac1ba75f720eccd5a34640c7b451a10eaf34384e506250a8e01/opentelemetry_exporter_otlp_proto_common-1.31.0-py3-none-any.whl", hash = "sha256:42b402f2340c0612907799d91d13b928314f06c57c362dfa0c074e20b673f43d", size = 18824 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.22.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "backoff", marker = "python_full_version < '3.10'" }, { name = "deprecated", marker = "python_full_version < '3.10'" }, { name = "googleapis-common-protos", marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-api", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-exporter-otlp-proto-common", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-proto", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-sdk", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-exporter-otlp-proto-common", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-proto", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-sdk", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "requests", marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/32/f10897c31cf0145e12f1cb9991b83a58372589b129c044812c77981b5ada/opentelemetry_exporter_otlp_proto_http-1.22.0.tar.gz", hash = "sha256:79ed108981ec68d5f7985355bca32003c2f3a5be1534a96d62d5861b758a82f4", size = 13991 } +sdist = { url = "https://files.pythonhosted.org/packages/ab/88/e70a2e9fbb1bddb1ab7b6d74fb02c68601bff5948292ce33464c84ee082e/opentelemetry_exporter_otlp_proto_http-1.29.0.tar.gz", hash = 
"sha256:b10d174e3189716f49d386d66361fbcf6f2b9ad81e05404acdee3f65c8214204", size = 15041 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/84/e01ea7aed455191f264a06c2e5358b5c739d5c7029e29319f29f6c515626/opentelemetry_exporter_otlp_proto_http-1.22.0-py3-none-any.whl", hash = "sha256:e002e842190af45b91dc55a97789d0b98e4308c88d886b16049ee90e17a4d396", size = 16850 }, + { url = "https://files.pythonhosted.org/packages/31/49/a1c3d24e8fe73b5f422e21b46c24aed3db7fd9427371c06442e7bdfe4d3b/opentelemetry_exporter_otlp_proto_http-1.29.0-py3-none-any.whl", hash = "sha256:b228bdc0f0cfab82eeea834a7f0ffdd2a258b26aa33d89fb426c29e8e934d9d0", size = 17217 }, ] [[package]] name = "opentelemetry-exporter-otlp-proto-http" -version = "1.29.0" +version = "1.31.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1204,38 +1186,39 @@ resolution-markers = [ dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, { name = "googleapis-common-protos", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-exporter-otlp-proto-common", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-proto", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-sdk", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-exporter-otlp-proto-common", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-proto", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-sdk", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "requests", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ab/88/e70a2e9fbb1bddb1ab7b6d74fb02c68601bff5948292ce33464c84ee082e/opentelemetry_exporter_otlp_proto_http-1.29.0.tar.gz", hash = "sha256:b10d174e3189716f49d386d66361fbcf6f2b9ad81e05404acdee3f65c8214204", size = 15041 } +sdist = { url = "https://files.pythonhosted.org/packages/3f/03/754b4fc0ae15d728dd69d65557c00b8dd559071ae9d8f7be19884387eb01/opentelemetry_exporter_otlp_proto_http-1.31.0.tar.gz", hash = "sha256:09cbe2f96a1996cae94a426fbc59cc2f5bbe9a246233f15832d295e750b407de", size = 15143 } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/49/a1c3d24e8fe73b5f422e21b46c24aed3db7fd9427371c06442e7bdfe4d3b/opentelemetry_exporter_otlp_proto_http-1.29.0-py3-none-any.whl", hash = "sha256:b228bdc0f0cfab82eeea834a7f0ffdd2a258b26aa33d89fb426c29e8e934d9d0", size = 17217 }, + { url = "https://files.pythonhosted.org/packages/3e/5f/7cf80009109bc1a7a08aa373caf0e98e0952d23835b3604caa41ae83ed70/opentelemetry_exporter_otlp_proto_http-1.31.0-py3-none-any.whl", hash = "sha256:d9f00fd0684324e8c2dc13cd9580f94d85b6e10040c5f89a533f611c7583f910", size = 17256 }, ] 
[[package]] name = "opentelemetry-instrumentation" -version = "0.48b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "opentelemetry-api", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "setuptools", marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, { name = "wrapt", marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724 } +sdist = { url = "https://files.pythonhosted.org/packages/79/2e/2e59a7cb636dc394bd7cf1758ada5e8ed87590458ca6bb2f9c26e0243847/opentelemetry_instrumentation-0.50b0.tar.gz", hash = "sha256:7d98af72de8dec5323e5202e46122e5f908592b22c6d24733aad619f07d82979", size = 26539 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449 }, + { url = "https://files.pythonhosted.org/packages/ff/b1/55a77152a83ec8998e520a3a575f44af1020cfe4bdc000b7538583293b85/opentelemetry_instrumentation-0.50b0-py3-none-any.whl", hash = "sha256:b8f9fc8812de36e1c6dffa5bfc6224df258841fb387b6dfe5df15099daa10630", size = 30728 }, ] [[package]] name = "opentelemetry-instrumentation" -version = "0.50b0" +version = "0.52b0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1246,35 +1229,35 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.52b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "packaging", marker = "python_full_version >= '3.10'" }, { name = "wrapt", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/79/2e/2e59a7cb636dc394bd7cf1758ada5e8ed87590458ca6bb2f9c26e0243847/opentelemetry_instrumentation-0.50b0.tar.gz", hash = "sha256:7d98af72de8dec5323e5202e46122e5f908592b22c6d24733aad619f07d82979", size = 26539 } +sdist = { url = 
"https://files.pythonhosted.org/packages/61/6b/75c83ea59d8f9b15409cfe86eb36ca25dbf8168e53e41ecec1cd98003ba8/opentelemetry_instrumentation-0.52b0.tar.gz", hash = "sha256:da75d328f9dbd59c6e61af6adec29f4bb581f5cbf3ddfae348268f9c1edaceeb", size = 28406 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/b1/55a77152a83ec8998e520a3a575f44af1020cfe4bdc000b7538583293b85/opentelemetry_instrumentation-0.50b0-py3-none-any.whl", hash = "sha256:b8f9fc8812de36e1c6dffa5bfc6224df258841fb387b6dfe5df15099daa10630", size = 30728 }, + { url = "https://files.pythonhosted.org/packages/90/2d/b0ebda7db9dbb6546782df0bfa70994673a64777ccf3852f8648ebbe4e9c/opentelemetry_instrumentation-0.52b0-py3-none-any.whl", hash = "sha256:0c93ca9fa1d438e2b741f21d6aa870c991e0e3b0f1367c8626bb3981b12ad2fe", size = 31034 }, ] [[package]] name = "opentelemetry-proto" -version = "1.22.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "protobuf", version = "4.25.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "protobuf", marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/5b50bb0a00d043fa116540f7367cdf70748a9ccb27f3cf7ed8ef299f6bdb/opentelemetry_proto-1.22.0.tar.gz", hash = "sha256:9ec29169286029f17ca34ec1f3455802ffb90131642d2f545ece9a63e8f69003", size = 33418 } +sdist = { url = "https://files.pythonhosted.org/packages/80/52/fd3b3d79e1b00ad2dcac92db6885e49bedbf7a6828647954e4952d653132/opentelemetry_proto-1.29.0.tar.gz", hash = "sha256:3c136aa293782e9b44978c738fff72877a4b78b5d21a64e879898db7b2d93e5d", size = 34320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/0d/579c664af2f1faca957c3d8c9159ae9fc7a1fe8de7b40a2d2e4fa1832574/opentelemetry_proto-1.22.0-py3-none-any.whl", hash = "sha256:ce7188d22c75b6d0fe53e7fb58501613d0feade5139538e79dedd9420610fa0c", size = 50778 }, + { url = "https://files.pythonhosted.org/packages/bd/66/a500e38ee322d89fce61c74bd7769c8ef3bebc6c2f43fda5f3fc3441286d/opentelemetry_proto-1.29.0-py3-none-any.whl", hash = "sha256:495069c6f5495cbf732501cdcd3b7f60fda2b9d3d4255706ca99b7ca8dec53ff", size = 55818 }, ] [[package]] name = "opentelemetry-proto" -version = "1.29.0" +version = "1.31.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1285,34 +1268,34 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "protobuf", version = "5.29.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "protobuf", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/52/fd3b3d79e1b00ad2dcac92db6885e49bedbf7a6828647954e4952d653132/opentelemetry_proto-1.29.0.tar.gz", hash = "sha256:3c136aa293782e9b44978c738fff72877a4b78b5d21a64e879898db7b2d93e5d", size = 34320 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/c9/f24d878bfcd050fc2f5d5b52d59ed4878390c30406db3f164aa99c6c60cc/opentelemetry_proto-1.31.0.tar.gz", hash = "sha256:5efe313788a8f4b739a94beb207749587a449a5e90c68b0b6a931567e8ca721d", size = 34361 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/bd/66/a500e38ee322d89fce61c74bd7769c8ef3bebc6c2f43fda5f3fc3441286d/opentelemetry_proto-1.29.0-py3-none-any.whl", hash = "sha256:495069c6f5495cbf732501cdcd3b7f60fda2b9d3d4255706ca99b7ca8dec53ff", size = 55818 }, + { url = "https://files.pythonhosted.org/packages/32/59/5c55b934a4dc6397aa64a2d2c5c84fce53db0531901567fa430ecc5f8070/opentelemetry_proto-1.31.0-py3-none-any.whl", hash = "sha256:ad4ded738e3d48d3280b37984eae75e63be01d8a0b04c83c743714aba960670d", size = 55855 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.22.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "opentelemetry-api", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "opentelemetry-semantic-conventions", version = "0.43b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2c/e5/8428cffb8905160be1fb9680da4be72394bd313437a559c0954cca68d983/opentelemetry_sdk-1.22.0.tar.gz", hash = "sha256:45267ac1f38a431fc2eb5d6e0c0d83afc0b78de57ac345488aa58c28c17991d0", size = 136651 } +sdist = { url = "https://files.pythonhosted.org/packages/0c/5a/1ed4c3cf6c09f80565fc085f7e8efa0c222712fd2a9412d07424705dcf72/opentelemetry_sdk-1.29.0.tar.gz", hash = "sha256:b0787ce6aade6ab84315302e72bd7a7f2f014b0fb1b7c3295b88afe014ed0643", size = 157229 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ff/94/588f49e0dd9a62ec46102736d2378330032a55e19c79ff7e4febea7ebed1/opentelemetry_sdk-1.22.0-py3-none-any.whl", hash = "sha256:a730555713d7c8931657612a88a141e3a4fe6eb5523d9e2d5a8b1e673d76efa6", size = 105558 }, + { url = "https://files.pythonhosted.org/packages/d1/1d/512b86af21795fb463726665e2f61db77d384e8779fdcf4cb0ceec47866d/opentelemetry_sdk-1.29.0-py3-none-any.whl", hash = "sha256:173be3b5d3f8f7d671f20ea37056710217959e774e2749d984355d1f9391a30a", size = 118078 }, ] [[package]] name = "opentelemetry-sdk" -version = "1.29.0" +version = "1.31.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1323,31 +1306,35 @@ resolution-markers = [ "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", ] dependencies = [ - { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-semantic-conventions", version = "0.50b0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-api", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "opentelemetry-semantic-conventions", version = "0.52b0", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.10'" }, { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/5a/1ed4c3cf6c09f80565fc085f7e8efa0c222712fd2a9412d07424705dcf72/opentelemetry_sdk-1.29.0.tar.gz", hash = "sha256:b0787ce6aade6ab84315302e72bd7a7f2f014b0fb1b7c3295b88afe014ed0643", size = 157229 } +sdist = { url = "https://files.pythonhosted.org/packages/a8/cc/2f461097fa53e7db0e5aca8be93be94d658a551f047129a206c34cc19d8d/opentelemetry_sdk-1.31.0.tar.gz", hash = "sha256:452d7d5b3c1db2e5e4cb64abede0ddd20690cb244a559c73a59652fdf6726070", size = 159527 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/1d/512b86af21795fb463726665e2f61db77d384e8779fdcf4cb0ceec47866d/opentelemetry_sdk-1.29.0-py3-none-any.whl", hash = "sha256:173be3b5d3f8f7d671f20ea37056710217959e774e2749d984355d1f9391a30a", size = 118078 }, + { url = "https://files.pythonhosted.org/packages/0b/f1/7dcc1fa9a27f3b346e56e8a090ee2312e36c8ff1f6f9f345a8fc778f0c4d/opentelemetry_sdk-1.31.0-py3-none-any.whl", hash = "sha256:97c9a03865e69723725fb64fe04343a488c3e61e684eb804bd7d6da2215dfc60", size = 118866 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.43b0" +version = "0.50b0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/1a/c73989de59d71c30922fce91edccda75942156e753d25976640dde0ac051/opentelemetry_semantic_conventions-0.43b0.tar.gz", hash = "sha256:b9576fb890df479626fa624e88dde42d3d60b8b6c8ae1152ad157a8b97358635", size = 34344 } +dependencies = [ + { name = "deprecated", marker = "python_full_version < '3.10'" }, + { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e7/4e/d7c7c91ff47cd96fe4095dd7231701aec7347426fd66872ff320d6cd1fcc/opentelemetry_semantic_conventions-0.50b0.tar.gz", hash = "sha256:02dc6dbcb62f082de9b877ff19a3f1ffaa3c306300fa53bfac761c4567c83d38", size = 100459 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/26/69be0f1a56a362c68fa0c7632d841b1b8f29d809bc6b1b897387c9f46973/opentelemetry_semantic_conventions-0.43b0-py3-none-any.whl", hash = "sha256:291284d7c1bf15fdaddf309b3bd6d3b7ce12a253cec6d27144439819a15d8445", size = 36840 }, + { url = "https://files.pythonhosted.org/packages/da/fb/dc15fad105450a015e913cfa4f5c27b6a5f1bea8fb649f8cae11e699c8af/opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl", hash = "sha256:e87efba8fdb67fb38113efea6a349531e75ed7ffc01562f65b802fcecb5e115e", size = 166602 }, ] [[package]] name = "opentelemetry-semantic-conventions" -version = "0.50b0" +version = "0.52b0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1359,20 +1346,11 @@ resolution-markers = [ ] dependencies = [ { name = "deprecated", marker = "python_full_version >= '3.10'" }, - { name = "opentelemetry-api", version = "1.29.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e7/4e/d7c7c91ff47cd96fe4095dd7231701aec7347426fd66872ff320d6cd1fcc/opentelemetry_semantic_conventions-0.50b0.tar.gz", hash = 
"sha256:02dc6dbcb62f082de9b877ff19a3f1ffaa3c306300fa53bfac761c4567c83d38", size = 100459 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/da/fb/dc15fad105450a015e913cfa4f5c27b6a5f1bea8fb649f8cae11e699c8af/opentelemetry_semantic_conventions-0.50b0-py3-none-any.whl", hash = "sha256:e87efba8fdb67fb38113efea6a349531e75ed7ffc01562f65b802fcecb5e115e", size = 166602 }, + { name = "opentelemetry-api", version = "1.31.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] - -[[package]] -name = "opentelemetry-semantic-conventions-ai" -version = "0.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/5f/76a9f82b08cdc05482a162d2bf67b5c0bbcc4118d4654f4b366f10fd71af/opentelemetry_semantic_conventions_ai-0.4.2.tar.gz", hash = "sha256:90b969c7d838e03e30a9150ffe46543d8e58e9d7370c7221fd30d4ce4d7a1b96", size = 4570 } +sdist = { url = "https://files.pythonhosted.org/packages/b1/0b/923171ff87ac44064b47bb798ac983917799458b7a5475d0badbf3f1e929/opentelemetry_semantic_conventions-0.52b0.tar.gz", hash = "sha256:f8bc8873a69d0a2f45746c31980baad2bb10ccee16b1816497ccf99417770386", size = 111274 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/bb/6b578a23c46ec87f364c809343cd8e80fcbcc7fc22129ee3dd1461aada81/opentelemetry_semantic_conventions_ai-0.4.2-py3-none-any.whl", hash = "sha256:0a5432aacd441eb7dbdf62e0de3f3d90ed4f69595b687a6dd2ccc4c5b94c5861", size = 5262 }, + { url = "https://files.pythonhosted.org/packages/5d/ca/56319c0dba740d1f5cd7b6db0dbe5c760400e49120975e063596eba25cc6/opentelemetry_semantic_conventions-0.52b0-py3-none-any.whl", hash = "sha256:4d843652ae1f9f3c0d4d8df0bfef740627c90495ac043fc33f0a04bad3b606e2", size = 183409 }, ] [[package]] @@ -1538,38 +1516,10 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/b6/c5319caea262f4821995dca2107483b94a3345d4607ad797c76cb9c36bcc/propcache-0.2.1-py3-none-any.whl", hash = "sha256:52277518d6aae65536e9cea52d4e7fd2f7a66f4aa2d30ed3f2fcea620ace3c54", size = 11818 }, ] -[[package]] -name = "protobuf" -version = "4.25.5" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.10' and platform_python_implementation == 'PyPy'", - "python_full_version < '3.10' and platform_python_implementation != 'PyPy'", -] -sdist = { url = "https://files.pythonhosted.org/packages/67/dd/48d5fdb68ec74d70fabcc252e434492e56f70944d9f17b6a15e3746d2295/protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584", size = 380315 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/00/35/1b3c5a5e6107859c4ca902f4fbb762e48599b78129a05d20684fef4a4d04/protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8", size = 392457 }, - { url = "https://files.pythonhosted.org/packages/a7/ad/bf3f358e90b7e70bf7fb520702cb15307ef268262292d3bdb16ad8ebc815/protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea", size = 413449 }, - { url = "https://files.pythonhosted.org/packages/51/49/d110f0a43beb365758a252203c43eaaad169fe7749da918869a8c991f726/protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173", size = 394248 }, - { url = 
"https://files.pythonhosted.org/packages/c6/ab/0f384ca0bc6054b1a7b6009000ab75d28a5506e4459378b81280ae7fd358/protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d", size = 293717 }, - { url = "https://files.pythonhosted.org/packages/05/a6/094a2640be576d760baa34c902dcb8199d89bce9ed7dd7a6af74dcbbd62d/protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331", size = 294635 }, - { url = "https://files.pythonhosted.org/packages/6a/1e/73a7f7a6c21dcca8ba0ca90d5404a5011c388dd87e2ea1a9f11ea6b61ec0/protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f", size = 392501 }, - { url = "https://files.pythonhosted.org/packages/26/1b/a6c17bb22bdda781ebf058fb88c3727f69bed9f7913c0c5835caf6bc09f5/protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45", size = 413396 }, - { url = "https://files.pythonhosted.org/packages/33/90/f198a61df8381fb43ae0fe81b3d2718e8dcc51ae8502c7657ab9381fbc4f/protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", size = 156467 }, -] - [[package]] name = "protobuf" version = "5.29.3" source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation == 'PyPy'", - "python_full_version == '3.10.*' and platform_python_implementation == 'PyPy'", - "python_full_version >= '3.11' and python_full_version < '3.13' and platform_python_implementation != 'PyPy'", - "python_full_version == '3.10.*' and platform_python_implementation != 'PyPy'", -] sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945 } wheels = [ { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708 }, @@ -2005,15 +1955,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/94/0498cdb7316ed67a1928300dd87d659c933479f44dec51b4f62bfd1f8028/ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7", size = 9145708 }, ] -[[package]] -name = "setuptools" -version = "75.8.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6/setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", size = 1343222 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/69/8a/b9dc7678803429e4a3bc9ba462fa3dd9066824d3c607490235c6a796be5a/setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3", size = 1228782 }, -] - [[package]] name = "shellingham" version = "1.5.4"