diff --git a/python/packages/azurefunctions/agent_framework_azurefunctions/_orchestration.py b/python/packages/azurefunctions/agent_framework_azurefunctions/_orchestration.py index a1061d8ceb..7a5579b03f 100644 --- a/python/packages/azurefunctions/agent_framework_azurefunctions/_orchestration.py +++ b/python/packages/azurefunctions/agent_framework_azurefunctions/_orchestration.py @@ -17,6 +17,7 @@ load_agent_response, ) from azure.durable_functions.models import TaskBase +from azure.durable_functions.models.actions.NoOpAction import NoOpAction from azure.durable_functions.models.Task import CompoundTask, TaskState from pydantic import BaseModel @@ -42,6 +43,25 @@ def __init__( _TypedCompoundTask = CompoundTask +class PreCompletedTask(TaskBase): + """A simple task that is already completed with a result. + + Used for fire-and-forget mode where we want to return immediately + with an acceptance response without waiting for entity processing. + """ + + def __init__(self, result: Any): + """Initialize with a completed result. + + Args: + result: The result value for this completed task + """ + # Initialize with a NoOp action since we don't need actual orchestration actions + super().__init__(-1, NoOpAction()) + # Immediately mark as completed with the result + self.set_value(is_error=False, value=result) + + class AgentTask(_TypedCompoundTask): """A custom Task that wraps entity calls and provides typed AgentRunResponse results. @@ -62,10 +82,13 @@ def __init__( response_format: Optional Pydantic model for response parsing correlation_id: Correlation ID for logging """ - super().__init__([entity_task]) + # Set instance variables BEFORE calling super().__init__ + # because super().__init__ may trigger try_set_value for pre-completed tasks self._response_format = response_format self._correlation_id = correlation_id + super().__init__([entity_task]) + # Override action_repr to expose the inner task's action directly # This ensures compatibility with ReplaySchema V3 which expects Action objects. self.action_repr = entity_task.action_repr @@ -130,16 +153,27 @@ def get_run_request( message: str, response_format: type[BaseModel] | None, enable_tool_calls: bool, + wait_for_response: bool = True, ) -> RunRequest: """Get the current run request from the orchestration context. 
+        Args:
+            message: The message to send to the agent
+            response_format: Optional Pydantic model for response parsing
+            enable_tool_calls: Whether to enable tool calls
+            wait_for_response: Whether to wait for the agent response (True by default)
+
         Returns:
             RunRequest: The current run request
+
+        Note:
+            If wait_for_response is False, the entity is signaled fire-and-forget.
         """
         request = super().get_run_request(
             message,
             response_format,
             enable_tool_calls,
+            wait_for_response,
         )
         request.orchestration_id = self.context.instance_id
         return request
@@ -166,7 +200,24 @@ def run_durable_agent(
             session_id,
         )

-        entity_task = self.context.call_entity(entity_id, "run", run_request.to_dict())
+        # Branch based on wait_for_response
+        if not run_request.wait_for_response:
+            # Fire-and-forget mode: signal entity and return pre-completed task
+            logger.info(
+                "[AzureFunctionsAgentExecutor] Fire-and-forget mode: signaling entity (correlation: %s)",
+                run_request.correlation_id,
+            )
+            self.context.signal_entity(entity_id, "run", run_request.to_dict())
+
+            # Create acceptance response using base class helper
+            acceptance_response = self._create_acceptance_response(run_request.correlation_id)
+
+            # Create a pre-completed task with the acceptance response
+            entity_task = PreCompletedTask(acceptance_response)
+        else:
+            # Blocking mode: call entity and wait for response
+            entity_task = self.context.call_entity(entity_id, "run", run_request.to_dict())
+
         return AgentTask(
             entity_task=entity_task,
             response_format=run_request.response_format,
diff --git a/python/packages/azurefunctions/tests/test_orchestration.py b/python/packages/azurefunctions/tests/test_orchestration.py
index 3da03f12be..c19d99177f 100644
--- a/python/packages/azurefunctions/tests/test_orchestration.py
+++ b/python/packages/azurefunctions/tests/test_orchestration.py
@@ -6,7 +6,7 @@
 from unittest.mock import Mock

 import pytest
-from agent_framework import AgentRunResponse, ChatMessage
+from agent_framework import AgentRunResponse, ChatMessage, Role
 from agent_framework_durabletask import DurableAIAgent
 from azure.durable_functions.models.Task import TaskBase, TaskState

@@ -206,6 +206,81 @@ def test_get_agent_raises_for_unregistered_agent(self) -> None:
             app.get_agent(Mock(), "MissingAgent")


+class TestAzureFunctionsFireAndForget:
+    """Test fire-and-forget mode for AzureFunctionsAgentExecutor."""
+
+    def test_fire_and_forget_calls_signal_entity(self, executor_with_uuid: tuple[Any, Mock, str]) -> None:
+        """Verify wait_for_response=False calls signal_entity instead of call_entity."""
+        executor, context, _ = executor_with_uuid
+        context.signal_entity = Mock()
+        context.call_entity = Mock(return_value=_create_entity_task())
+
+        agent = DurableAIAgent(executor, "TestAgent")
+        thread = agent.get_new_thread()
+
+        # Run with wait_for_response=False
+        result = agent.run("Test message", thread=thread, wait_for_response=False)
+
+        # Verify signal_entity was called and call_entity was not
+        assert context.signal_entity.call_count == 1
+        assert context.call_entity.call_count == 0
+
+        # Should still return an AgentTask
+        assert isinstance(result, AgentTask)
+
+    def test_fire_and_forget_returns_completed_task(self, executor_with_uuid: tuple[Any, Mock, str]) -> None:
+        """Verify wait_for_response=False returns pre-completed AgentTask."""
+        executor, context, _ = executor_with_uuid
+        context.signal_entity = Mock()
+
+        agent = DurableAIAgent(executor, "TestAgent")
+        thread = agent.get_new_thread()
+
+        result = agent.run("Test message", thread=thread, wait_for_response=False)
+
+        # 
Task should be immediately complete + assert isinstance(result, AgentTask) + assert result.is_completed + + def test_fire_and_forget_returns_acceptance_response(self, executor_with_uuid: tuple[Any, Mock, str]) -> None: + """Verify wait_for_response=False returns acceptance response.""" + executor, context, _ = executor_with_uuid + context.signal_entity = Mock() + + agent = DurableAIAgent(executor, "TestAgent") + thread = agent.get_new_thread() + + result = agent.run("Test message", thread=thread, wait_for_response=False) + + # Get the result + response = result.result + assert isinstance(response, AgentRunResponse) + assert len(response.messages) == 1 + assert response.messages[0].role == Role.SYSTEM + # Check message contains key information + message_text = response.messages[0].text + assert "accepted" in message_text.lower() + assert "background" in message_text.lower() + + def test_blocking_mode_still_works(self, executor_with_uuid: tuple[Any, Mock, str]) -> None: + """Verify wait_for_response=True uses call_entity as before.""" + executor, context, _ = executor_with_uuid + context.signal_entity = Mock() + context.call_entity = Mock(return_value=_create_entity_task()) + + agent = DurableAIAgent(executor, "TestAgent") + thread = agent.get_new_thread() + + result = agent.run("Test message", thread=thread, wait_for_response=True) + + # Verify call_entity was called and signal_entity was not + assert context.call_entity.call_count == 1 + assert context.signal_entity.call_count == 0 + + # Should return an AgentTask + assert isinstance(result, AgentTask) + + class TestOrchestrationIntegration: """Integration tests for orchestration scenarios.""" diff --git a/python/packages/durabletask/agent_framework_durabletask/_entities.py b/python/packages/durabletask/agent_framework_durabletask/_entities.py index 2e0b429233..7c99a4edc3 100644 --- a/python/packages/durabletask/agent_framework_durabletask/_entities.py +++ b/python/packages/durabletask/agent_framework_durabletask/_entities.py @@ -142,7 +142,7 @@ async def run( response_format = run_request.response_format enable_tool_calls = run_request.enable_tool_calls - logger.debug("[AgentEntity.run] Received Message: %s", run_request) + logger.debug("[AgentEntity.run] Received ThreadId %s Message: %s", thread_id, run_request) state_request = DurableAgentStateRequest.from_run_request(run_request) self.state.data.conversation_history.append(state_request) diff --git a/python/packages/durabletask/agent_framework_durabletask/_executors.py b/python/packages/durabletask/agent_framework_durabletask/_executors.py index 8ac893b943..d670a09a32 100644 --- a/python/packages/durabletask/agent_framework_durabletask/_executors.py +++ b/python/packages/durabletask/agent_framework_durabletask/_executors.py @@ -16,10 +16,10 @@ from datetime import datetime, timezone from typing import Any, Generic, TypeVar -from agent_framework import AgentRunResponse, AgentThread, ChatMessage, ErrorContent, Role, get_logger +from agent_framework import AgentRunResponse, AgentThread, ChatMessage, ErrorContent, Role, TextContent, get_logger from durabletask.client import TaskHubGrpcClient from durabletask.entities import EntityInstanceId -from durabletask.task import CompositeTask, OrchestrationContext, Task +from durabletask.task import CompletableTask, CompositeTask, OrchestrationContext, Task from pydantic import BaseModel from ._constants import DEFAULT_MAX_POLL_RETRIES, DEFAULT_POLL_INTERVAL_SECONDS @@ -33,16 +33,19 @@ TaskT = TypeVar("TaskT") -class 
DurableAgentTask(CompositeTask[AgentRunResponse]): +class DurableAgentTask(CompositeTask[AgentRunResponse], CompletableTask[AgentRunResponse]): """A custom Task that wraps entity calls and provides typed AgentRunResponse results. This task wraps the underlying entity call task and intercepts its completion to convert the raw result into a typed AgentRunResponse object. + + When yielded in an orchestration, this task returns an AgentRunResponse: + response: AgentRunResponse = yield durable_agent_task """ def __init__( self, - entity_task: Task[Any], + entity_task: CompletableTask[Any], response_format: type[BaseModel] | None, correlation_id: str, ): @@ -55,7 +58,7 @@ def __init__( """ self._response_format = response_format self._correlation_id = correlation_id - super().__init__([entity_task]) # type: ignore[misc] + super().__init__([entity_task]) # type: ignore def on_child_completed(self, task: Task[Any]) -> None: """Handle completion of the underlying entity task. @@ -69,11 +72,8 @@ def on_child_completed(self, task: Task[Any]) -> None: return if task.is_failed: - # Propagate the failure - self._exception = task.get_exception() - self._is_complete = True - if self._parent is not None: - self._parent.on_child_completed(self) + # Propagate the failure - pass the original exception directly + self.fail("call_entity Task failed", task.get_exception()) return # Task succeeded - transform the raw result @@ -94,18 +94,12 @@ def on_child_completed(self, task: Task[Any]) -> None: ) # Set the typed AgentRunResponse as this task's result - self._result = response - self._is_complete = True - - if self._parent is not None: - self._parent.on_child_completed(self) + self.complete(response) - except Exception: - logger.exception( - "[DurableAgentTask] Failed to convert result for correlation_id: %s", - self._correlation_id, - ) - raise + except Exception as ex: + err_msg = "[DurableAgentTask] Failed to convert result for correlation_id: " + self._correlation_id + logger.exception(err_msg) + self.fail(err_msg, ex) class DurableAgentExecutor(ABC, Generic[TaskT]): @@ -155,6 +149,7 @@ def get_run_request( message: str, response_format: type[BaseModel] | None, enable_tool_calls: bool, + wait_for_response: bool = True, ) -> RunRequest: """Create a RunRequest for the given parameters.""" correlation_id = self.generate_unique_id() @@ -162,9 +157,34 @@ def get_run_request( message=message, response_format=response_format, enable_tool_calls=enable_tool_calls, + wait_for_response=wait_for_response, correlation_id=correlation_id, ) + def _create_acceptance_response(self, correlation_id: str) -> AgentRunResponse: + """Create an acceptance response for fire-and-forget mode. + + Args: + correlation_id: Correlation ID for tracking the request + + Returns: + AgentRunResponse: Acceptance response with correlation ID + """ + acceptance_message = ChatMessage( + role=Role.SYSTEM, + contents=[ + TextContent( + f"Request accepted for processing (correlation_id: {correlation_id}). " + f"Agent is executing in the background. " + f"Retrieve response via your configured streaming or callback mechanism." + ) + ], + ) + return AgentRunResponse( + messages=[acceptance_message], + created_at=datetime.now(timezone.utc).isoformat(), + ) + class ClientAgentExecutor(DurableAgentExecutor[AgentRunResponse]): """Execution strategy for external clients. 
@@ -205,11 +225,20 @@ def run_durable_agent( thread: Optional conversation thread (creates new if not provided) Returns: - AgentRunResponse: The agent's response after execution completes + AgentRunResponse: The agent's response after execution completes, or an immediate + acknowledgement if wait_for_response is False """ # Signal the entity with the request entity_id = self._signal_agent_entity(agent_name, run_request, thread) + # If fire-and-forget mode, return immediately without polling + if not run_request.wait_for_response: + logger.info( + "[ClientAgentExecutor] Fire-and-forget mode: request signaled (correlation: %s)", + run_request.correlation_id, + ) + return self._create_acceptance_response(run_request.correlation_id) + # Poll for the response agent_response = self._poll_for_agent_response(entity_id, run_request.correlation_id) @@ -395,11 +424,16 @@ def __init__(self, context: OrchestrationContext): self._context = context logger.debug("[OrchestrationAgentExecutor] Initialized") + def generate_unique_id(self) -> str: + """Create a new UUID that is safe for replay within an orchestration or operation.""" + return self._context.new_uuid() + def get_run_request( self, message: str, response_format: type[BaseModel] | None, enable_tool_calls: bool, + wait_for_response: bool = True, ) -> RunRequest: """Get the current run request from the orchestration context. @@ -410,6 +444,7 @@ def get_run_request( message, response_format, enable_tool_calls, + wait_for_response, ) request.orchestration_id = self._context.instance_id return request @@ -449,8 +484,22 @@ def run_durable_agent( session_id, ) - # Call the entity and get the underlying task - entity_task: Task[Any] = self._context.call_entity(entity_id, "run", run_request.to_dict()) # type: ignore + # Branch based on wait_for_response + if not run_request.wait_for_response: + # Fire-and-forget mode: signal entity and return pre-completed task + logger.info( + "[OrchestrationAgentExecutor] Fire-and-forget mode: signaling entity (correlation: %s)", + run_request.correlation_id, + ) + self._context.signal_entity(entity_id, "run", run_request.to_dict()) + + # Create a pre-completed task with acceptance response + acceptance_response = self._create_acceptance_response(run_request.correlation_id) + entity_task: CompletableTask[AgentRunResponse] = CompletableTask() + entity_task.complete(acceptance_response) + else: + # Blocking mode: call entity and wait for response + entity_task = self._context.call_entity(entity_id, "run", run_request.to_dict()) # type: ignore # Wrap in DurableAgentTask for response transformation return DurableAgentTask( diff --git a/python/packages/durabletask/agent_framework_durabletask/_models.py b/python/packages/durabletask/agent_framework_durabletask/_models.py index 947ab7a17f..169417ebae 100644 --- a/python/packages/durabletask/agent_framework_durabletask/_models.py +++ b/python/packages/durabletask/agent_framework_durabletask/_models.py @@ -104,6 +104,8 @@ class RunRequest: role: The role of the message sender (user, system, or assistant) response_format: Optional Pydantic BaseModel type describing the structured response format enable_tool_calls: Whether to enable tool calls for this request + wait_for_response: If True (default), caller will wait for agent response. 
If False, + returns immediately after signaling (fire-and-forget mode) correlation_id: Correlation ID for tracking the response to this specific request created_at: Optional timestamp when the request was created orchestration_id: Optional ID of the orchestration that initiated this request @@ -115,6 +117,7 @@ class RunRequest: role: Role = Role.USER response_format: type[BaseModel] | None = None enable_tool_calls: bool = True + wait_for_response: bool = True created_at: datetime | None = None orchestration_id: str | None = None @@ -126,6 +129,7 @@ def __init__( role: Role | str | None = Role.USER, response_format: type[BaseModel] | None = None, enable_tool_calls: bool = True, + wait_for_response: bool = True, created_at: datetime | None = None, orchestration_id: str | None = None, ) -> None: @@ -135,6 +139,7 @@ def __init__( self.response_format = response_format self.request_response_format = request_response_format self.enable_tool_calls = enable_tool_calls + self.wait_for_response = wait_for_response self.created_at = created_at if created_at is not None else datetime.now(tz=timezone.utc) self.orchestration_id = orchestration_id @@ -155,6 +160,7 @@ def to_dict(self) -> dict[str, Any]: result = { "message": self.message, "enable_tool_calls": self.enable_tool_calls, + "wait_for_response": self.wait_for_response, "role": self.role.value, "request_response_format": self.request_response_format, "correlationId": self.correlation_id, @@ -198,6 +204,7 @@ def from_dict(cls, data: dict[str, Any]) -> RunRequest: request_response_format=data.get("request_response_format", REQUEST_RESPONSE_FORMAT_TEXT), role=cls.coerce_role(data.get("role")), response_format=_deserialize_response_format(data.get("response_format")), + wait_for_response=data.get("wait_for_response", True), enable_tool_calls=data.get("enable_tool_calls", True), created_at=created_at, orchestration_id=data.get("orchestrationId"), diff --git a/python/packages/durabletask/agent_framework_durabletask/_response_utils.py b/python/packages/durabletask/agent_framework_durabletask/_response_utils.py index aeb0e19c6c..123de7b0cf 100644 --- a/python/packages/durabletask/agent_framework_durabletask/_response_utils.py +++ b/python/packages/durabletask/agent_framework_durabletask/_response_utils.py @@ -51,10 +51,20 @@ def ensure_response_format( response_format: Optional Pydantic model class to parse the response value into correlation_id: Correlation ID for logging purposes response: The AgentRunResponse object to validate and parse + + Raises: + ValueError: If response_format is specified but response.value cannot be parsed """ if response_format is not None and not isinstance(response.value, response_format): response.try_parse_value(response_format) + # Validate that parsing succeeded + if not isinstance(response.value, response_format): + raise ValueError( + f"Response value could not be parsed into required format {response_format.__name__} " + f"for correlation_id {correlation_id}" + ) + logger.debug( "[ensure_response_format] Loaded AgentRunResponse.value for correlation_id %s with type: %s", correlation_id, diff --git a/python/packages/durabletask/agent_framework_durabletask/_shim.py b/python/packages/durabletask/agent_framework_durabletask/_shim.py index c2e9aee039..5bfde22e74 100644 --- a/python/packages/durabletask/agent_framework_durabletask/_shim.py +++ b/python/packages/durabletask/agent_framework_durabletask/_shim.py @@ -108,9 +108,20 @@ def run( # pyright: ignore[reportIncompatibleMethodOverride] thread: AgentThread | None = 
None,
         response_format: type[BaseModel] | None = None,
         enable_tool_calls: bool = True,
+        wait_for_response: bool = True,
     ) -> TaskT:
         """Execute the agent via the injected provider.

+        Args:
+            messages: The message(s) to send to the agent
+            thread: Optional agent thread for conversation context
+            response_format: Optional Pydantic model for structured response
+            enable_tool_calls: Whether to enable tool calls for this request
+            wait_for_response: If True (default), waits for agent response.
+                If False, returns immediately (fire-and-forget mode).
+
+                Supported for both client and orchestration executor contexts.
+
         Note:
             This method overrides AgentProtocol.run() with a different return type:

             - AgentProtocol.run() returns Coroutine[Any, Any, AgentRunResponse] (async)
@@ -121,6 +132,9 @@ def run(  # pyright: ignore[reportIncompatibleMethodOverride]
         Returns:
             TaskT: The task type specific to the executor
+
+        Note:
+            In fire-and-forget mode the result is an immediate acceptance response or a pre-completed task.
         """
         message_str = self._normalize_messages(messages)

@@ -128,6 +142,7 @@ def run(  # pyright: ignore[reportIncompatibleMethodOverride]
             message=message_str,
             response_format=response_format,
             enable_tool_calls=enable_tool_calls,
+            wait_for_response=wait_for_response,
         )

         return self._executor.run_durable_agent(
diff --git a/python/packages/durabletask/pyproject.toml b/python/packages/durabletask/pyproject.toml
index ccaea5782e..48964a589e 100644
--- a/python/packages/durabletask/pyproject.toml
+++ b/python/packages/durabletask/pyproject.toml
@@ -23,8 +23,8 @@ classifiers = [
 ]
 dependencies = [
     "agent-framework-core",
-    "durabletask>=1.1.0",
-    "durabletask-azuremanaged>=1.1.0"
+    "durabletask>=1.3.0",
+    "durabletask-azuremanaged>=1.3.0"
 ]

 [dependency-groups]
diff --git a/python/packages/durabletask/tests/test_executors.py b/python/packages/durabletask/tests/test_executors.py
index a42200bdea..ac8bd729a4 100644
--- a/python/packages/durabletask/tests/test_executors.py
+++ b/python/packages/durabletask/tests/test_executors.py
@@ -39,7 +39,10 @@ def mock_client() -> Mock:
 @pytest.fixture
 def mock_entity_task() -> Mock:
     """Provide a mock entity task."""
-    return Mock(spec=Task)
+    task = Mock(spec=Task)
+    task.is_complete = False
+    task.is_failed = False
+    return task


 @pytest.fixture
@@ -77,6 +80,32 @@ def successful_agent_response() -> dict[str, Any]:
     }


+@pytest.fixture
+def configure_successful_entity_task(mock_entity_task: Mock) -> Any:
+    """Provide a helper to configure mock_entity_task with a successful response."""
+
+    def _configure(response: dict[str, Any]) -> Mock:
+        mock_entity_task.is_failed = False
+        mock_entity_task.is_complete = False
+        mock_entity_task.get_result = Mock(return_value=response)
+        return mock_entity_task
+
+    return _configure
+
+
+@pytest.fixture
+def configure_failed_entity_task(mock_entity_task: Mock) -> Any:
+    """Provide a helper to configure mock_entity_task with a failure."""
+
+    def _configure(exception: Exception) -> Mock:
+        mock_entity_task.is_failed = True
+        mock_entity_task.is_complete = True
+        mock_entity_task.get_exception = Mock(return_value=exception)
+        return mock_entity_task
+
+    return _configure
+
+
 class TestExecutorThreadCreation:
     """Test that executors properly create DurableAgentThread with parameters."""

@@ -176,6 +205,115 @@ def test_executor_respects_custom_poll_interval(self, mock_client: Mock, sample_
         assert isinstance(result, AgentRunResponse)


+class TestClientAgentExecutorFireAndForget:
+    """Test fire-and-forget mode (wait_for_response=False) for ClientAgentExecutor."""
+
+    def 
test_fire_and_forget_returns_immediately(self, mock_client: Mock) -> None: + """Verify wait_for_response=False returns immediately without polling.""" + executor = ClientAgentExecutor(mock_client, max_poll_retries=10, poll_interval_seconds=0.1) + + # Create a request with wait_for_response=False + request = RunRequest(message="test message", correlation_id="test-123", wait_for_response=False) + + # Measure time taken + start = time.time() + result = executor.run_durable_agent("test_agent", request) + elapsed = time.time() - start + + # Should return immediately without polling (elapsed time should be very small) + assert elapsed < 0.1 # Much faster than any polling would take + + # Should return an AgentRunResponse + assert isinstance(result, AgentRunResponse) + + # Should have signaled the entity but not polled + assert mock_client.signal_entity.call_count == 1 + assert mock_client.get_entity.call_count == 0 # No polling occurred + + def test_fire_and_forget_returns_empty_response(self, mock_client: Mock) -> None: + """Verify wait_for_response=False returns an acceptance message with correlation ID.""" + executor = ClientAgentExecutor(mock_client) + + request = RunRequest(message="test message", correlation_id="test-456", wait_for_response=False) + + result = executor.run_durable_agent("test_agent", request) + + # Verify it contains an acceptance message + assert isinstance(result, AgentRunResponse) + assert len(result.messages) == 1 + assert result.messages[0].role == Role.SYSTEM + # Check message contains key information + message_text = result.messages[0].text + assert "accepted" in message_text.lower() + assert "test-456" in message_text # Contains correlation ID + assert "background" in message_text.lower() + + +class TestOrchestrationAgentExecutorFireAndForget: + """Test fire-and-forget mode for OrchestrationAgentExecutor.""" + + def test_orchestration_fire_and_forget_calls_signal_entity(self, mock_orchestration_context: Mock) -> None: + """Verify wait_for_response=False calls signal_entity instead of call_entity.""" + executor = OrchestrationAgentExecutor(mock_orchestration_context) + mock_orchestration_context.signal_entity = Mock() + + request = RunRequest(message="test", correlation_id="test-123", wait_for_response=False) + + result = executor.run_durable_agent("test_agent", request) + + # Verify signal_entity was called and call_entity was not + assert mock_orchestration_context.signal_entity.call_count == 1 + assert mock_orchestration_context.call_entity.call_count == 0 + + # Should still return a DurableAgentTask + assert isinstance(result, DurableAgentTask) + + def test_orchestration_fire_and_forget_returns_completed_task(self, mock_orchestration_context: Mock) -> None: + """Verify wait_for_response=False returns pre-completed DurableAgentTask.""" + executor = OrchestrationAgentExecutor(mock_orchestration_context) + mock_orchestration_context.signal_entity = Mock() + + request = RunRequest(message="test", correlation_id="test-456", wait_for_response=False) + + result = executor.run_durable_agent("test_agent", request) + + # Task should be immediately complete + assert isinstance(result, DurableAgentTask) + assert result.is_complete + + def test_orchestration_fire_and_forget_returns_acceptance_response(self, mock_orchestration_context: Mock) -> None: + """Verify wait_for_response=False returns acceptance response.""" + executor = OrchestrationAgentExecutor(mock_orchestration_context) + mock_orchestration_context.signal_entity = Mock() + + request = RunRequest(message="test", 
correlation_id="test-789", wait_for_response=False) + + result = executor.run_durable_agent("test_agent", request) + + # Get the result + response = result.get_result() + assert isinstance(response, AgentRunResponse) + assert len(response.messages) == 1 + assert response.messages[0].role == Role.SYSTEM + assert "test-789" in response.messages[0].text + + def test_orchestration_blocking_mode_calls_call_entity(self, mock_orchestration_context: Mock) -> None: + """Verify wait_for_response=True uses call_entity as before.""" + executor = OrchestrationAgentExecutor(mock_orchestration_context) + mock_orchestration_context.signal_entity = Mock() + + request = RunRequest(message="test", correlation_id="test-abc", wait_for_response=True) + + result = executor.run_durable_agent("test_agent", request) + + # Verify call_entity was called and signal_entity was not + assert mock_orchestration_context.call_entity.call_count == 1 + assert mock_orchestration_context.signal_entity.call_count == 0 + + # Should return a DurableAgentTask + assert isinstance(result, DurableAgentTask) + + class TestOrchestrationAgentExecutorRun: """Test OrchestrationAgentExecutor.run_durable_agent implementation.""" @@ -240,11 +378,10 @@ class TestDurableAgentTask: """Test DurableAgentTask completion and response transformation.""" def test_durable_agent_task_transforms_successful_result( - self, mock_entity_task: Mock, successful_agent_response: dict[str, Any] + self, configure_successful_entity_task: Any, successful_agent_response: dict[str, Any] ) -> None: """Verify DurableAgentTask converts successful entity result to AgentRunResponse.""" - mock_entity_task.is_failed = False - mock_entity_task.get_result = Mock(return_value=successful_agent_response) + mock_entity_task = configure_successful_entity_task(successful_agent_response) task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") @@ -257,10 +394,9 @@ def test_durable_agent_task_transforms_successful_result( assert len(result.messages) == 1 assert result.messages[0].role == Role.ASSISTANT - def test_durable_agent_task_propagates_failure(self, mock_entity_task: Mock) -> None: + def test_durable_agent_task_propagates_failure(self, configure_failed_entity_task: Any) -> None: """Verify DurableAgentTask propagates task failures.""" - mock_entity_task.is_failed = True - mock_entity_task.get_exception = Mock(return_value=ValueError("Entity error")) + mock_entity_task = configure_failed_entity_task(ValueError("Entity error")) task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") @@ -269,19 +405,17 @@ def test_durable_agent_task_propagates_failure(self, mock_entity_task: Mock) -> assert task.is_complete assert task.is_failed + # The exception is wrapped in TaskFailedError by the durabletask library exception = task.get_exception() - assert isinstance(exception, ValueError) - assert str(exception) == "Entity error" + assert exception is not None - def test_durable_agent_task_validates_response_format(self, mock_entity_task: Mock) -> None: + def test_durable_agent_task_validates_response_format(self, configure_successful_entity_task: Any) -> None: """Verify DurableAgentTask validates response format when provided.""" - mock_entity_task.is_failed = False - mock_entity_task.get_result = Mock( - return_value={ - "messages": [{"role": "assistant", "contents": [{"type": "text", "text": '{"answer": "42"}'}]}], - "created_at": "2025-12-30T10:00:00Z", - } - ) + response = { + "messages": 
[{"role": "assistant", "contents": [{"type": "text", "text": '{"answer": "42"}'}]}], + "created_at": "2025-12-30T10:00:00Z", + } + mock_entity_task = configure_successful_entity_task(response) class TestResponse(BaseModel): answer: str @@ -296,11 +430,10 @@ class TestResponse(BaseModel): assert isinstance(result, AgentRunResponse) def test_durable_agent_task_ignores_duplicate_completion( - self, mock_entity_task: Mock, successful_agent_response: dict[str, Any] + self, configure_successful_entity_task: Any, successful_agent_response: dict[str, Any] ) -> None: """Verify DurableAgentTask ignores duplicate completion calls.""" - mock_entity_task.is_failed = False - mock_entity_task.get_result = Mock(return_value=successful_agent_response) + mock_entity_task = configure_successful_entity_task(successful_agent_response) task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") @@ -315,6 +448,124 @@ def test_durable_agent_task_ignores_duplicate_completion( assert first_result is second_result assert mock_entity_task.get_result.call_count == 1 + def test_durable_agent_task_fails_on_malformed_response(self, configure_successful_entity_task: Any) -> None: + """Verify DurableAgentTask fails when entity returns malformed response data.""" + # Use data that will cause AgentRunResponse.from_dict to fail + # Using a list instead of dict, or other invalid structure + mock_entity_task = configure_successful_entity_task("invalid string response") + + task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") + + # Simulate child task completion with malformed data + task.on_child_completed(mock_entity_task) + + assert task.is_complete + assert task.is_failed + + def test_durable_agent_task_fails_on_invalid_response_format(self, configure_successful_entity_task: Any) -> None: + """Verify DurableAgentTask fails when response doesn't match required format.""" + response = { + "messages": [{"role": "assistant", "contents": [{"type": "text", "text": '{"wrong": "field"}'}]}], + "created_at": "2025-12-30T10:00:00Z", + } + mock_entity_task = configure_successful_entity_task(response) + + class StrictResponse(BaseModel): + required_field: str + + task = DurableAgentTask(entity_task=mock_entity_task, response_format=StrictResponse, correlation_id="test-123") + + # Simulate child task completion with wrong format + task.on_child_completed(mock_entity_task) + + assert task.is_complete + assert task.is_failed + + def test_durable_agent_task_handles_empty_response(self, configure_successful_entity_task: Any) -> None: + """Verify DurableAgentTask handles response with empty messages list.""" + response: dict[str, str | list[Any]] = { + "messages": [], + "created_at": "2025-12-30T10:00:00Z", + } + mock_entity_task = configure_successful_entity_task(response) + + task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") + + # Simulate child task completion + task.on_child_completed(mock_entity_task) + + assert task.is_complete + result = task.get_result() + assert isinstance(result, AgentRunResponse) + assert len(result.messages) == 0 + + def test_durable_agent_task_handles_multiple_messages(self, configure_successful_entity_task: Any) -> None: + """Verify DurableAgentTask correctly processes response with multiple messages.""" + response = { + "messages": [ + {"role": "assistant", "contents": [{"type": "text", "text": "First message"}]}, + {"role": "assistant", "contents": [{"type": "text", 
"text": "Second message"}]}, + ], + "created_at": "2025-12-30T10:00:00Z", + } + mock_entity_task = configure_successful_entity_task(response) + + task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") + + # Simulate child task completion + task.on_child_completed(mock_entity_task) + + assert task.is_complete + result = task.get_result() + assert isinstance(result, AgentRunResponse) + assert len(result.messages) == 2 + assert result.messages[0].role == Role.ASSISTANT + assert result.messages[1].role == Role.ASSISTANT + + def test_durable_agent_task_is_not_complete_initially(self, mock_entity_task: Mock) -> None: + """Verify DurableAgentTask is not complete when first created.""" + task = DurableAgentTask(entity_task=mock_entity_task, response_format=None, correlation_id="test-123") + + assert not task.is_complete + assert not task.is_failed + + def test_durable_agent_task_completes_with_complex_response_format( + self, configure_successful_entity_task: Any + ) -> None: + """Verify DurableAgentTask validates complex nested response formats correctly.""" + response = { + "messages": [ + { + "role": "assistant", + "contents": [ + { + "type": "text", + "text": '{"name": "test", "count": 42, "items": ["a", "b", "c"]}', + } + ], + } + ], + "created_at": "2025-12-30T10:00:00Z", + } + mock_entity_task = configure_successful_entity_task(response) + + class ComplexResponse(BaseModel): + name: str + count: int + items: list[str] + + task = DurableAgentTask( + entity_task=mock_entity_task, response_format=ComplexResponse, correlation_id="test-123" + ) + + # Simulate child task completion + task.on_child_completed(mock_entity_task) + + assert task.is_complete + assert not task.is_failed + result = task.get_result() + assert isinstance(result, AgentRunResponse) + if __name__ == "__main__": pytest.main([__file__, "-v", "--tb=short"]) diff --git a/python/packages/durabletask/tests/test_models.py b/python/packages/durabletask/tests/test_models.py index ffcfe868e1..5a93d74e22 100644 --- a/python/packages/durabletask/tests/test_models.py +++ b/python/packages/durabletask/tests/test_models.py @@ -25,6 +25,7 @@ def test_init_with_defaults(self) -> None: assert request.role == Role.USER assert request.response_format is None assert request.enable_tool_calls is True + assert request.wait_for_response is True def test_init_with_all_fields(self) -> None: """Test RunRequest initialization with all fields.""" @@ -35,6 +36,7 @@ def test_init_with_all_fields(self) -> None: role=Role.SYSTEM, response_format=schema, enable_tool_calls=False, + wait_for_response=False, ) assert request.message == "Hello" @@ -42,6 +44,7 @@ def test_init_with_all_fields(self) -> None: assert request.role == Role.SYSTEM assert request.response_format is schema assert request.enable_tool_calls is False + assert request.wait_for_response is False def test_init_coerces_string_role(self) -> None: """Ensure string role values are coerced into Role instances.""" @@ -56,6 +59,7 @@ def test_to_dict_with_defaults(self) -> None: assert data["message"] == "Test message" assert data["enable_tool_calls"] is True + assert data["wait_for_response"] is True assert data["role"] == "user" assert data["correlationId"] == "corr-004" assert "response_format" not in data or data["response_format"] is None @@ -70,6 +74,7 @@ def test_to_dict_with_all_fields(self) -> None: role=Role.ASSISTANT, response_format=schema, enable_tool_calls=False, + wait_for_response=False, ) data = request.to_dict() @@ -80,6 +85,7 @@ def 
test_to_dict_with_all_fields(self) -> None: assert data["response_format"]["module"] == schema.__module__ assert data["response_format"]["qualname"] == schema.__qualname__ assert data["enable_tool_calls"] is False + assert data["wait_for_response"] is False assert "thread_id" not in data def test_from_dict_with_defaults(self) -> None: @@ -91,6 +97,7 @@ def test_from_dict_with_defaults(self) -> None: assert request.correlation_id == "corr-006" assert request.role == Role.USER assert request.enable_tool_calls is True + assert request.wait_for_response is True def test_from_dict_ignores_thread_id_field(self) -> None: """Ensure legacy thread_id input does not break RunRequest parsing.""" diff --git a/python/packages/durabletask/tests/test_shim.py b/python/packages/durabletask/tests/test_shim.py index 1fa348695c..4172de6403 100644 --- a/python/packages/durabletask/tests/test_shim.py +++ b/python/packages/durabletask/tests/test_shim.py @@ -34,7 +34,10 @@ def mock_executor() -> Mock: # Mock get_run_request to create actual RunRequest objects def create_run_request( - message: str, response_format: type[BaseModel] | None = None, enable_tool_calls: bool = True + message: str, + response_format: type[BaseModel] | None = None, + enable_tool_calls: bool = True, + wait_for_response: bool = True, ) -> RunRequest: import uuid @@ -43,6 +46,7 @@ def create_run_request( correlation_id=str(uuid.uuid4()), response_format=response_format, enable_tool_calls=enable_tool_calls, + wait_for_response=wait_for_response, ) mock.get_run_request = Mock(side_effect=create_run_request) diff --git a/python/samples/getting_started/azure_functions/07_single_agent_orchestration_hitl/function_app.py b/python/samples/getting_started/azure_functions/07_single_agent_orchestration_hitl/function_app.py index fc72ceb770..08a14ffe11 100644 --- a/python/samples/getting_started/azure_functions/07_single_agent_orchestration_hitl/function_app.py +++ b/python/samples/getting_started/azure_functions/07_single_agent_orchestration_hitl/function_app.py @@ -102,7 +102,6 @@ def content_generation_hitl_orchestration(context: DurableOrchestrationContext) ) content = initial_raw.value - logger.info("Type of content after extraction: %s", type(content)) # type: ignore[misc] if content is None or not isinstance(content, GeneratedContent): raise ValueError("Agent returned no content after extraction.") diff --git a/python/samples/getting_started/durabletask/01_single_agent/README.md b/python/samples/getting_started/durabletask/01_single_agent/README.md index 6e31b0a737..ffe3b1484a 100644 --- a/python/samples/getting_started/durabletask/01_single_agent/README.md +++ b/python/samples/getting_started/durabletask/01_single_agent/README.md @@ -1,6 +1,6 @@ -# Single Agent Sample +# Single Agent -This sample demonstrates how to use the durable agents extension to create a worker-client setup that hosts a single AI agent and provides interactive conversation via the Durable Task Scheduler. +This sample demonstrates how to create a worker-client setup that hosts a single AI agent and provides interactive conversation via the Durable Task Scheduler. 
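+
+At its core, the client-side interaction is only a few lines. The sketch below condenses what `client.py` in this sample does, assuming the worker from this sample is running; the `get_client()` helper and the agent name `Joker` come from that file:
+
+```python
+from client import get_client  # helper defined in this sample's client.py
+
+agent_client = get_client()              # wraps DurableTaskSchedulerClient
+joker = agent_client.get_agent("Joker")  # reference to the worker-hosted agent
+thread = joker.get_new_thread()          # durable conversation thread
+response = joker.run("Tell me a short joke about cloud computing.", thread=thread)
+print(response.text)
+```
+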
## Key Concepts Demonstrated @@ -15,18 +15,24 @@ See the [README.md](../README.md) file in the parent directory for more informat ## Running the Sample -With the environment setup, you can run the sample using separate worker and client processes: +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: -**Start the worker:** +**Option 1: Combined (Recommended for Testing)** ```bash cd samples/getting_started/durabletask/01_single_agent -python worker.py +python sample.py ``` -The worker will register the Joker agent and listen for requests. +**Option 2: Separate Processes** -**In a new terminal, run the client:** +Start the worker in one terminal: + +```bash +python worker.py +``` + +In a new terminal, run the client: ```bash python client.py @@ -58,9 +64,10 @@ Because light attracts bugs! You can view the state of the agent in the Durable Task Scheduler dashboard: 1. Open your browser and navigate to `http://localhost:8082` -2. In the dashboard, you can view the state of the Joker agent, including its conversation history and current state - -The agent maintains conversation state across multiple interactions, and you can inspect this state in the dashboard to understand how the durable agents extension manages conversation context. +2. In the dashboard, you can view: + - The state of the Joker agent entity (dafx-Joker) + - Conversation history and current state + - How the durable agents extension manages conversation context diff --git a/python/samples/getting_started/durabletask/01_single_agent/client.py b/python/samples/getting_started/durabletask/01_single_agent/client.py index bfd5147ea7..b38029b8cb 100644 --- a/python/samples/getting_started/durabletask/01_single_agent/client.py +++ b/python/samples/getting_started/durabletask/01_single_agent/client.py @@ -23,69 +23,95 @@ logger = logging.getLogger(__name__) -async def main() -> None: - """Main entry point for the client application.""" - logger.info("Starting Durable Task Agent Client...") +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableAIAgentClient: + """Create a configured DurableAIAgentClient. 
- # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + + Returns: + Configured DurableAIAgentClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") - # Create a client using Azure Managed Durable Task - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + dts_client = DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", taskhub=taskhub_name, - token_credential=credential + token_credential=credential, + log_handler=log_handler ) - # Wrap it with the agent client - agent_client = DurableAIAgentClient(client) + return DurableAIAgentClient(dts_client) + + +def run_client(agent_client: DurableAIAgentClient) -> None: + """Run client interactions with the Joker agent. + Args: + agent_client: The DurableAIAgentClient instance + """ # Get a reference to the Joker agent - logger.info("Getting reference to Joker agent...") + logger.debug("Getting reference to Joker agent...") joker = agent_client.get_agent("Joker") # Create a new thread for the conversation thread = joker.get_new_thread() + logger.debug(f"Thread ID: {thread.session_id}") + logger.info("Start chatting with the Joker agent! (Type 'exit' to quit)") - logger.info(f"Created conversation thread: {thread.session_id}") - logger.info("") - - try: - # First message - message1 = "Tell me a short joke about cloud computing." - logger.info(f"User: {message1}") - logger.info("") + # Interactive conversation loop + while True: + # Get user input + try: + user_message = input("You: ").strip() + except (EOFError, KeyboardInterrupt): + logger.info("\nExiting...") + break - # Run the agent - this blocks until the response is ready - response1 = joker.run(message1, thread=thread) - logger.info(f"Agent: {response1.text}") - logger.info("") + # Check for exit command + if user_message.lower() == "exit": + logger.info("Goodbye!") + break - # Second message - continuing the conversation - message2 = "Now tell me one about Python programming." 
- logger.info(f"User: {message2}") - logger.info("") - - response2 = joker.run(message2, thread=thread) - logger.info(f"Agent: {response2.text}") - logger.info("") - - logger.info(f"Conversation completed successfully!") - logger.info(f"Thread ID: {thread.session_id}") + # Skip empty messages + if not user_message: + continue + # Send message to agent and get response + try: + response = joker.run(user_message, thread=thread) + logger.info(f"Joker: {response.text} \n") + except Exception as e: + logger.error(f"Error getting response: {e}") + + logger.info("Conversation completed.") + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task Agent Client...") + + # Create client using helper function + agent_client = get_client() + + try: + run_client(agent_client) except Exception as e: logger.exception(f"Error during agent interaction: {e}") finally: - logger.info("Client shutting down") + logger.debug("Client shutting down") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/01_single_agent/sample.py b/python/samples/getting_started/durabletask/01_single_agent/sample.py index cfbceaaebd..b8c39974c0 100644 --- a/python/samples/getting_started/durabletask/01_single_agent/sample.py +++ b/python/samples/getting_started/durabletask/01_single_agent/sample.py @@ -14,122 +14,42 @@ """ import logging -import os -from agent_framework.azure import AzureOpenAIChatClient -from agent_framework_durabletask import DurableAIAgentClient, DurableAIAgentWorker -from azure.identity import AzureCliCredential, DefaultAzureCredential from dotenv import load_dotenv -from durabletask.azuremanaged.client import DurableTaskSchedulerClient -from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker -# Configure logging -logging.basicConfig(level=logging.INFO) -logger = logging.getLogger(__name__) - - -def create_joker_agent(): - """Create the Joker agent using Azure OpenAI. 
- - Returns: - AgentProtocol: The configured Joker agent - """ - return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( - name="Joker", - instructions="You are good at telling jokes.", - ) +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker +# Configure logging (must be after imports to override their basicConfig) +logging.basicConfig(level=logging.INFO, force=True) +logger = logging.getLogger(__name__) def main(): """Main entry point - runs both worker and client in single process.""" - logger.info("Starting Durable Task Agent Sample (Combined Worker + Client)...") - - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") + logger.debug("Starting Durable Task Agent Sample (Combined Worker + Client)...") - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() - secure_channel = endpoint != "http://localhost:8080" + silent_handler = logging.NullHandler() - # Create and start the worker using a context manager - with DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=secure_channel, - taskhub=taskhub_name, - token_credential=credential - ) as worker: - - # Wrap with the agent worker - agent_worker = DurableAIAgentWorker(worker) - - # Create and register the Joker agent - logger.info("Creating and registering Joker agent...") - joker_agent = create_joker_agent() - agent_worker.add_agent(joker_agent) - - logger.info(f"✓ Registered agent: {joker_agent.name}") - logger.info(f" Entity name: dafx-{joker_agent.name}") - logger.info("") + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents using helper function + setup_worker(dts_worker) # Start the worker - worker.start() - logger.info("Worker started and listening for requests...") - logger.info("") - - # Create the client - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=secure_channel, - taskhub=taskhub_name, - token_credential=credential - ) - - # Wrap it with the agent client - agent_client = DurableAIAgentClient(client) - - # Get a reference to the Joker agent - logger.info("Getting reference to Joker agent...") - joker = agent_client.get_agent("Joker") - - # Create a new thread for the conversation - thread = joker.get_new_thread() + dts_worker.start() + logger.debug("Worker started and listening for requests...") - logger.info(f"Created conversation thread: {thread.session_id}") - logger.info("") + # Create the client using helper function + agent_client = get_client(log_handler=silent_handler) try: - # First message - message1 = "Tell me a short joke about cloud computing." - logger.info(f"User: {message1}") - logger.info("") - - # Run the agent - this blocks until the response is ready - response1 = joker.run(message1, thread=thread) - logger.info(f"Agent: {response1.text}; {response1}") - logger.info("") - - # Second message - continuing the conversation - message2 = "Now tell me one about Python programming." 
- logger.info(f"User: {message2}") - logger.info("") - - response2 = joker.run(message2, thread=thread) - logger.info(f"Agent: {response2.text}; {response2}") - logger.info("") - - logger.info(f"Conversation completed successfully!") - logger.info(f"Thread ID: {thread.session_id}") - + # Run client interactions using helper function + run_client(agent_client) except Exception as e: logger.exception(f"Error during agent interaction: {e}") - logger.info("") - logger.info("Sample completed. Worker shutting down...") + logger.debug("Sample completed. Worker shutting down...") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/01_single_agent/worker.py b/python/samples/getting_started/durabletask/01_single_agent/worker.py index 4c0d172915..47dc6ab5c7 100644 --- a/python/samples/getting_started/durabletask/01_single_agent/worker.py +++ b/python/samples/getting_started/durabletask/01_single_agent/worker.py @@ -19,7 +19,7 @@ from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker # Configure logging -logging.basicConfig(level=logging.INFO) +logging.basicConfig(level=logging.WARNING) logger = logging.getLogger(__name__) @@ -35,39 +35,70 @@ def create_joker_agent(): ) -async def main(): - """Main entry point for the worker process.""" - logger.info("Starting Durable Task Agent Worker...") +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() - # Create a worker using Azure Managed Durable Task - worker = DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", taskhub=taskhub_name, - token_credential=credential + token_credential=credential, + log_handler=log_handler ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with agents registered. 
+ Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents registered + """ # Wrap it with the agent worker agent_worker = DurableAIAgentWorker(worker) # Create and register the Joker agent - logger.info("Creating and registering Joker agent...") + logger.debug("Creating and registering Joker agent...") joker_agent = create_joker_agent() agent_worker.add_agent(joker_agent) - logger.info(f"✓ Registered agent: {joker_agent.name}") - logger.info(f" Entity name: dafx-{joker_agent.name}") - logger.info("") + logger.debug(f"✓ Registered agent: {joker_agent.name}") + logger.debug(f" Entity name: dafx-{joker_agent.name}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Agent Worker...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents + setup_worker(worker) + logger.info("Worker is ready and listening for requests...") logger.info("Press Ctrl+C to stop.") logger.info("") @@ -80,9 +111,9 @@ async def main(): while True: await asyncio.sleep(1) except KeyboardInterrupt: - logger.info("Worker shutdown initiated") + logger.debug("Worker shutdown initiated") - logger.info("Worker stopped") + logger.debug("Worker stopped") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/02_multi_agent/README.md b/python/samples/getting_started/durabletask/02_multi_agent/README.md new file mode 100644 index 0000000000..e9b2a36e19 --- /dev/null +++ b/python/samples/getting_started/durabletask/02_multi_agent/README.md @@ -0,0 +1,80 @@ +# Multi-Agent + +This sample demonstrates how to host multiple AI agents with different tools in a single worker-client setup using the Durable Task Scheduler. + +## Key Concepts Demonstrated + +- Hosting multiple agents (WeatherAgent and MathAgent) in a single worker process. +- Each agent with its own specialized tools and instructions. +- Interacting with different agents using separate conversation threads. +- Worker-client architecture for multi-agent systems. + +## Environment Setup + +See the [README.md](../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +## Running the Sample + +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: + +**Option 1: Combined (Recommended for Testing)** + +```bash +cd samples/getting_started/durabletask/02_multi_agent +python sample.py +``` + +**Option 2: Separate Processes** + +Start the worker in one terminal: + +```bash +python worker.py +``` + +In a new terminal, run the client: + +```bash +python client.py +``` + +The client will interact with both agents: + +``` +Starting Durable Task Multi-Agent Client... +Using taskhub: default +Using endpoint: http://localhost:8080 + +================================================================================ +Testing WeatherAgent +================================================================================ + +Created weather conversation thread: +User: What is the weather in Seattle? + +🔧 [TOOL CALLED] get_weather(location=Seattle) +✓ [TOOL RESULT] {'location': 'Seattle', 'temperature': 72, 'conditions': 'Sunny', 'humidity': 45} + +WeatherAgent: The current weather in Seattle is sunny with a temperature of 72°F and 45% humidity. 
+ +================================================================================ +Testing MathAgent +================================================================================ + +Created math conversation thread: +User: Calculate a 20% tip on a $50 bill + +🔧 [TOOL CALLED] calculate_tip(bill_amount=50.0, tip_percentage=20.0) +✓ [TOOL RESULT] {'bill_amount': 50.0, 'tip_percentage': 20.0, 'tip_amount': 10.0, 'total': 60.0} + +MathAgent: For a $50 bill with a 20% tip, the tip amount is $10.00 and the total is $60.00. +``` + +## Viewing Agent State + +You can view the state of both agents in the Durable Task Scheduler dashboard: + +1. Open your browser and navigate to `http://localhost:8082` +2. In the dashboard, you can view: + - The state of both WeatherAgent and MathAgent entities (dafx-WeatherAgent, dafx-MathAgent) + - Each agent's conversation state across multiple interactions diff --git a/python/samples/getting_started/durabletask/02_multi_agent/client.py b/python/samples/getting_started/durabletask/02_multi_agent/client.py new file mode 100644 index 0000000000..33f2071e43 --- /dev/null +++ b/python/samples/getting_started/durabletask/02_multi_agent/client.py @@ -0,0 +1,115 @@ +"""Client application for interacting with multiple hosted agents. + +This client connects to the Durable Task Scheduler and interacts with two different +agents (WeatherAgent and MathAgent), demonstrating how to work with multiple agents +each with their own specialized capabilities and tools. + +Prerequisites: +- The worker must be running with both agents registered +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running +""" + +import asyncio +import logging +import os + +from agent_framework_durabletask import DurableAIAgentClient +from azure.identity import DefaultAzureCredential +from durabletask.azuremanaged.client import DurableTaskSchedulerClient + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableAIAgentClient: + """Create a configured DurableAIAgentClient. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + + Returns: + Configured DurableAIAgentClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + dts_client = DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + return DurableAIAgentClient(dts_client) + + +def run_client(agent_client: DurableAIAgentClient) -> None: + """Run client interactions with both WeatherAgent and MathAgent. 
+ + Args: + agent_client: The DurableAIAgentClient instance + """ + logger.debug("Testing WeatherAgent") + + # Get reference to WeatherAgent + weather_agent = agent_client.get_agent("WeatherAgent") + weather_thread = weather_agent.get_new_thread() + + logger.debug(f"Created weather conversation thread: {weather_thread.session_id}") + + # Test WeatherAgent + weather_message = "What is the weather in Seattle?" + logger.info(f"User: {weather_message}") + + weather_response = weather_agent.run(weather_message, thread=weather_thread) + logger.info(f"WeatherAgent: {weather_response.text} \n") + + logger.debug("Testing MathAgent") + + # Get reference to MathAgent + math_agent = agent_client.get_agent("MathAgent") + math_thread = math_agent.get_new_thread() + + logger.debug(f"Created math conversation thread: {math_thread.session_id}") + + # Test MathAgent + math_message = "Calculate a 20% tip on a $50 bill" + logger.info(f"User: {math_message}") + + math_response = math_agent.run(math_message, thread=math_thread) + logger.info(f"MathAgent: {math_response.text} \n") + + logger.debug("Both agents completed successfully!") + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task Multi-Agent Client...") + + # Create client using helper function + agent_client = get_client() + + try: + run_client(agent_client) + except Exception as e: + logger.exception(f"Error during agent interaction: {e}") + finally: + logger.debug("Client shutting down") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/02_multi_agent/requirements.txt b/python/samples/getting_started/durabletask/02_multi_agent/requirements.txt new file mode 100644 index 0000000000..371b9e3b79 --- /dev/null +++ b/python/samples/getting_started/durabletask/02_multi_agent/requirements.txt @@ -0,0 +1,6 @@ +# Agent Framework packages (installing from local package until a package is published) +-e ../../../../ +-e ../../../../packages/durabletask + +# Azure authentication +azure-identity diff --git a/python/samples/getting_started/durabletask/02_multi_agent/sample.py b/python/samples/getting_started/durabletask/02_multi_agent/sample.py new file mode 100644 index 0000000000..9945601c20 --- /dev/null +++ b/python/samples/getting_started/durabletask/02_multi_agent/sample.py @@ -0,0 +1,57 @@ +"""Multi-Agent Sample - Durable Task Integration (Combined Worker + Client) + +This sample demonstrates running both the worker and client in a single process +for multiple agents with different tools. The worker registers two agents +(WeatherAgent and MathAgent), each with their own specialized capabilities. 
+ +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running (e.g., using Docker) + +To run this sample: + python sample.py +""" + +import logging + +from dotenv import load_dotenv + +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker + +# Configure logging +logging.basicConfig(level=logging.INFO, force=True) +logger = logging.getLogger(__name__) + + +def main(): + """Main entry point - runs both worker and client in single process.""" + logger.debug("Starting Durable Task Multi-Agent Sample (Combined Worker + Client)...") + + silent_handler = logging.NullHandler() + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents using helper function + setup_worker(dts_worker) + + # Start the worker + dts_worker.start() + logger.debug("Worker started and listening for requests...") + + # Create the client using helper function + agent_client = get_client(log_handler=silent_handler) + + try: + # Run client interactions using helper function + run_client(agent_client) + except Exception as e: + logger.exception(f"Error during agent interaction: {e}") + + logger.debug("Sample completed. Worker shutting down...") + + +if __name__ == "__main__": + load_dotenv() + main() diff --git a/python/samples/getting_started/durabletask/02_multi_agent/worker.py b/python/samples/getting_started/durabletask/02_multi_agent/worker.py new file mode 100644 index 0000000000..3a759a1862 --- /dev/null +++ b/python/samples/getting_started/durabletask/02_multi_agent/worker.py @@ -0,0 +1,171 @@ +"""Worker process for hosting multiple agents with different tools using Durable Task. + +This worker registers two agents - a weather assistant and a math assistant - each +with their own specialized tools. This demonstrates how to host multiple agents +with different capabilities in a single worker process. 
+ +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Start a Durable Task Scheduler (e.g., using Docker) +""" + +import asyncio +import logging +import os +from typing import Any + +from agent_framework.azure import AzureOpenAIChatClient +from agent_framework_durabletask import DurableAIAgentWorker +from azure.identity import AzureCliCredential, DefaultAzureCredential +from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Agent names +WEATHER_AGENT_NAME = "WeatherAgent" +MATH_AGENT_NAME = "MathAgent" + + +def get_weather(location: str) -> dict[str, Any]: + """Get current weather for a location.""" + logger.info(f"🔧 [TOOL CALLED] get_weather(location={location})") + result = { + "location": location, + "temperature": 72, + "conditions": "Sunny", + "humidity": 45, + } + logger.info(f"✓ [TOOL RESULT] {result}") + return result + + +def calculate_tip(bill_amount: float, tip_percentage: float = 15.0) -> dict[str, Any]: + """Calculate tip amount and total bill.""" + logger.info( + f"🔧 [TOOL CALLED] calculate_tip(bill_amount={bill_amount}, tip_percentage={tip_percentage})" + ) + tip = bill_amount * (tip_percentage / 100) + total = bill_amount + tip + result = { + "bill_amount": bill_amount, + "tip_percentage": tip_percentage, + "tip_amount": round(tip, 2), + "total": round(total, 2), + } + logger.info(f"✓ [TOOL RESULT] {result}") + return result + + +def create_weather_agent(): + """Create the Weather agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured Weather agent with weather tool + """ + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name=WEATHER_AGENT_NAME, + instructions="You are a helpful weather assistant. Provide current weather information.", + tools=[get_weather], + ) + + +def create_math_agent(): + """Create the Math agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured Math agent with calculation tools + """ + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name=MATH_AGENT_NAME, + instructions="You are a helpful math assistant. Help users with calculations like tip calculations.", + tools=[calculate_tip], + ) + + +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with multiple agents registered. 
+ + Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents registered + """ + # Wrap it with the agent worker + agent_worker = DurableAIAgentWorker(worker) + + # Create and register both agents + logger.debug("Creating and registering agents...") + weather_agent = create_weather_agent() + math_agent = create_math_agent() + + agent_worker.add_agent(weather_agent) + agent_worker.add_agent(math_agent) + + logger.debug(f"✓ Registered agents: {weather_agent.name}, {math_agent.name}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Multi-Agent Worker...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents + setup_worker(worker) + + logger.info("Worker is ready and listening for requests...") + logger.info("Press Ctrl+C to stop. \n") + + try: + # Start the worker (this blocks until stopped) + worker.start() + + # Keep the worker running + while True: + await asyncio.sleep(1) + except KeyboardInterrupt: + logger.debug("Worker shutdown initiated") + + logger.info("Worker stopped") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/README.md b/python/samples/getting_started/durabletask/03_single_agent_streaming/README.md new file mode 100644 index 0000000000..5505acbe6c --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/README.md @@ -0,0 +1,150 @@ +# Single Agent with Reliable Streaming + +This sample demonstrates how to use Redis Streams with agent response callbacks to enable reliable, resumable streaming for durable agents. Streaming responses are persisted to Redis, allowing clients to disconnect and reconnect without losing messages. + +## Key Concepts Demonstrated + +- Using `AgentResponseCallbackProtocol` to capture streaming agent responses. +- Persisting streaming chunks to Redis Streams for reliable delivery. +- Non-blocking agent execution with `wait_for_response=False` (fire-and-forget mode). +- Cursor-based resumption for disconnected clients. +- Decoupling agent execution from response streaming. + +## Prerequisites + +In addition to the common setup in the parent [README.md](../README.md), this sample requires Redis: + +```bash +docker run -d --name redis -p 6379:6379 redis:latest +``` + +## Environment Setup + +See the [README.md](../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. 
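+
+To sanity-check the Redis prerequisite before running the sample, you can ping it with the same `redis` client the sample uses (an illustrative check, not part of the sample code):
+
+```python
+import asyncio
+
+import redis.asyncio as aioredis
+
+
+async def check_redis(url: str = "redis://localhost:6379") -> None:
+    client = aioredis.from_url(url)
+    try:
+        # PING raises a connection error if Redis is unreachable
+        print("Redis reachable:", await client.ping())
+    finally:
+        await client.aclose()
+
+
+asyncio.run(check_redis())
+```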
+ +Additional environment variables for this sample: + +```bash +# Optional: Redis Configuration +REDIS_CONNECTION_STRING=redis://localhost:6379 +REDIS_STREAM_TTL_MINUTES=10 +``` + +## Running the Sample + +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: + +**Option 1: Combined (Recommended for Testing)** + +```bash +cd samples/getting_started/durabletask/03_single_agent_streaming +python sample.py +``` + +**Option 2: Separate Processes** + +Start the worker in one terminal: + +```bash +python worker.py +``` + +In a new terminal, run the client: + +```bash +python client.py +``` + +The client will send a travel planning request to the TravelPlanner agent and stream the response from Redis in real-time: + +``` +================================================================================ +TravelPlanner Agent - Redis Streaming Demo +================================================================================ + +You: Plan a 3-day trip to Tokyo with emphasis on culture and food + +TravelPlanner (streaming from Redis): +-------------------------------------------------------------------------------- +# Your Amazing 3-Day Tokyo Adventure! 🗾 + +Let me create the perfect cultural and culinary journey through Tokyo... + +## Day 1: Traditional Tokyo & First Impressions +... +(continues streaming) +... + +✓ Response complete! +``` + + +## How It Works + +### Redis Streaming Callback + +The `RedisStreamCallback` class implements `AgentResponseCallbackProtocol` to capture streaming updates and persist them to Redis: + +```python +class RedisStreamCallback(AgentResponseCallbackProtocol): + async def on_streaming_response_update(self, update, context): + # Write chunk to Redis Stream + async with await get_stream_handler() as handler: + await handler.write_chunk(thread_id, update.text, sequence) + + async def on_agent_response(self, response, context): + # Write end-of-stream marker + async with await get_stream_handler() as handler: + await handler.write_completion(thread_id, sequence) +``` + +### Worker Registration + +The worker registers the agent with the Redis streaming callback: + +```python +redis_callback = RedisStreamCallback() +agent_worker = DurableAIAgentWorker(worker, callback=redis_callback) +agent_worker.add_agent(create_travel_agent()) +``` + +### Client Streaming + +The client uses fire-and-forget mode to start the agent and streams from Redis: + +```python +# Start agent run with wait_for_response=False for non-blocking execution +travel_planner.run(user_message, thread=thread, wait_for_response=False) + +# Stream response from Redis while the agent is processing +async with await get_stream_handler() as stream_handler: + async for chunk in stream_handler.read_stream(thread_id): + if chunk.text: + print(chunk.text, end="", flush=True) + elif chunk.is_done: + break +``` + +**Fire-and-Forget Mode**: The `wait_for_response=False` parameter enables non-blocking execution. The `run()` method signals the agent and returns immediately, allowing the client to stream from Redis without blocking. 
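+
+As a minimal sketch of how a caller might combine the two steps (the helper name, the timeout, and the use of `thread.session_id.key` as the stream id are illustrative assumptions, not part of the sample API):
+
+```python
+import asyncio
+
+
+async def run_and_stream(agent, thread, message: str, timeout_seconds: float = 120.0) -> str:
+    """Hypothetical helper: fire-and-forget run, then collect the streamed text from Redis."""
+    # Non-blocking: signals the durable agent and returns immediately
+    agent.run(message, thread=thread, wait_for_response=False)
+
+    parts: list[str] = []
+
+    async def consume() -> None:
+        # get_stream_handler() is the sample helper that opens a fresh Redis client
+        async with await get_stream_handler() as handler:
+            async for chunk in handler.read_stream(str(thread.session_id.key)):
+                if chunk.error:
+                    raise RuntimeError(chunk.error)
+                if chunk.is_done:
+                    return
+                if chunk.text:
+                    parts.append(chunk.text)
+
+    # Bound the read so a missing end-of-stream marker cannot hang the caller
+    await asyncio.wait_for(consume(), timeout=timeout_seconds)
+    return "".join(parts)
+```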
+ +### Cursor-Based Resumption + +Clients can resume streaming from any point after disconnection: + +```python +cursor = "1734649123456-0" # Entry ID from previous stream +async with await get_stream_handler() as stream_handler: + async for chunk in stream_handler.read_stream(thread_id, cursor=cursor): + # Process chunk +``` + +## Viewing Agent State + +You can view the state of the TravelPlanner agent in the Durable Task Scheduler dashboard: + +1. Open your browser and navigate to `http://localhost:8082` +2. In the dashboard, you can view: + - The state of the TravelPlanner agent entity (dafx-TravelPlanner) + - Conversation history and current state + - How the durable agents extension manages conversation context with streaming + diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/client.py b/python/samples/getting_started/durabletask/03_single_agent_streaming/client.py new file mode 100644 index 0000000000..be10eddc93 --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/client.py @@ -0,0 +1,186 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Client application for interacting with the TravelPlanner agent and streaming from Redis. + +This client demonstrates: +1. Sending a travel planning request to the durable agent +2. Streaming the response from Redis in real-time +3. Handling reconnection and cursor-based resumption + +Prerequisites: +- The worker must be running with the TravelPlanner agent registered +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME +- Redis must be running +- Durable Task Scheduler must be running +""" + +import asyncio +import logging +import os +from datetime import timedelta + +import redis.asyncio as aioredis +from agent_framework_durabletask import DurableAIAgentClient +from azure.identity import DefaultAzureCredential +from durabletask.azuremanaged.client import DurableTaskSchedulerClient + +from redis_stream_response_handler import RedisStreamResponseHandler + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Configuration +REDIS_CONNECTION_STRING = os.environ.get("REDIS_CONNECTION_STRING", "redis://localhost:6379") +REDIS_STREAM_TTL_MINUTES = int(os.environ.get("REDIS_STREAM_TTL_MINUTES", "10")) + + +async def get_stream_handler() -> RedisStreamResponseHandler: + """Create a new Redis stream handler for each request. + + This avoids event loop conflicts by creating a fresh Redis client + in the current event loop context. + """ + # Create a new Redis client in the current event loop + redis_client = aioredis.from_url( # type: ignore[reportUnknownMemberType] + REDIS_CONNECTION_STRING, + encoding="utf-8", + decode_responses=False, + ) + + return RedisStreamResponseHandler( + redis_client=redis_client, + stream_ttl=timedelta(minutes=REDIS_STREAM_TTL_MINUTES), + ) + + +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableAIAgentClient: + """Create a configured DurableAIAgentClient. 
+ + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional log handler for client logging + + Returns: + Configured DurableAIAgentClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + dts_client = DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + return DurableAIAgentClient(dts_client) + + +async def stream_from_redis(thread_id: str, cursor: str | None = None) -> None: + """Stream agent responses from Redis. + + Args: + thread_id: The conversation/thread ID to stream from + cursor: Optional cursor to resume from. If None, starts from beginning. + """ + stream_key = f"agent-stream:{thread_id}" + logger.info(f"Streaming response from Redis (thread: {thread_id[:8]}...)") + logger.debug(f"To manually check Redis, run: redis-cli XLEN {stream_key}") + if cursor: + logger.info(f"Resuming from cursor: {cursor}") + + async with await get_stream_handler() as stream_handler: + logger.info(f"Stream handler created, starting to read...") + try: + chunk_count = 0 + async for chunk in stream_handler.read_stream(thread_id, cursor): + chunk_count += 1 + logger.debug(f"Received chunk #{chunk_count}: error={chunk.error}, is_done={chunk.is_done}, text_len={len(chunk.text) if chunk.text else 0}") + + if chunk.error: + logger.error(f"Stream error: {chunk.error}") + break + + if chunk.is_done: + print("\n✓ Response complete!", flush=True) + logger.info(f"Stream completed after {chunk_count} chunks") + break + + if chunk.text: + # Print directly to console with flush for immediate display + print(chunk.text, end='', flush=True) + + if chunk_count == 0: + logger.warning("No chunks received from Redis stream!") + logger.warning(f"Check Redis manually: redis-cli XLEN {stream_key}") + logger.warning(f"View stream contents: redis-cli XREAD STREAMS {stream_key} 0") + + except Exception as ex: + logger.error(f"Error reading from Redis: {ex}", exc_info=True) + + +def run_client(agent_client: DurableAIAgentClient) -> None: + """Run client interactions with the TravelPlanner agent. + + Args: + agent_client: The DurableAIAgentClient instance + """ + # Get a reference to the TravelPlanner agent + logger.debug("Getting reference to TravelPlanner agent...") + travel_planner = agent_client.get_agent("TravelPlanner") + + # Create a new thread for the conversation + thread = travel_planner.get_new_thread() + if not thread.session_id: + logger.error("Failed to create a new thread with session ID!") + return + + key = thread.session_id.key + logger.info(f"Thread ID: {key}") + + # Get user input + print("\nEnter your travel planning request:") + user_message = input("> ").strip() + + if not user_message: + logger.warning("No input provided. 
Using default message.") + user_message = "Plan a 3-day trip to Tokyo with emphasis on culture and food" + + logger.info(f"\nYou: {user_message}\n") + logger.info("TravelPlanner (streaming from Redis):") + logger.info("-" * 80) + + # Start the agent run with wait_for_response=False for non-blocking execution + # This signals the agent to start processing without waiting for completion + # The agent will execute in the background and write chunks to Redis + travel_planner.run(user_message, thread=thread, wait_for_response=False) + + # Stream the response from Redis + # This demonstrates that the client can stream from Redis while + # the agent is still processing (or after it completes) + asyncio.run(stream_from_redis(str(key))) + + logger.info("\nDemo completed!") + + +if __name__ == "__main__": + from dotenv import load_dotenv + load_dotenv() + + # Create the client + client = get_client() + + # Run the demo + run_client(client) diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/redis_stream_response_handler.py b/python/samples/getting_started/durabletask/03_single_agent_streaming/redis_stream_response_handler.py new file mode 100644 index 0000000000..981393cf00 --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/redis_stream_response_handler.py @@ -0,0 +1,200 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Redis-based streaming response handler for durable agents. + +This module provides reliable, resumable streaming of agent responses using Redis Streams +as a message broker. It enables clients to disconnect and reconnect without losing messages. +""" + +import asyncio +import time +from dataclasses import dataclass +from datetime import timedelta +from collections.abc import AsyncIterator + +import redis.asyncio as aioredis + + +@dataclass +class StreamChunk: + """Represents a chunk of streamed data from Redis. + + Attributes: + entry_id: The Redis stream entry ID (used as cursor for resumption). + text: The text content of the chunk, if any. + is_done: Whether this is the final chunk in the stream. + error: Error message if an error occurred, otherwise None. + """ + entry_id: str + text: str | None = None + is_done: bool = False + error: str | None = None + + +class RedisStreamResponseHandler: + """Handles agent responses by persisting them to Redis Streams. + + This handler writes agent response updates to Redis Streams, enabling reliable, + resumable streaming delivery to clients. Clients can disconnect and reconnect + at any point using cursor-based pagination. + + Attributes: + MAX_EMPTY_READS: Maximum number of empty reads before timing out. + POLL_INTERVAL_MS: Interval in milliseconds between polling attempts. + """ + + MAX_EMPTY_READS = 300 + POLL_INTERVAL_MS = 1000 + + def __init__(self, redis_client: aioredis.Redis, stream_ttl: timedelta): + """Initialize the Redis stream response handler. + + Args: + redis_client: The async Redis client instance. + stream_ttl: Time-to-live for stream entries in Redis. 
+ """ + self._redis = redis_client + self._stream_ttl = stream_ttl + + async def __aenter__(self): + """Enter async context manager.""" + return self + + async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: object) -> None: + """Exit async context manager and close Redis connection.""" + await self._redis.aclose() + + async def write_chunk( + self, + conversation_id: str, + text: str, + sequence: int, + ) -> None: + """Write a single text chunk to the Redis Stream. + + Args: + conversation_id: The conversation ID for this agent run. + text: The text content to write. + sequence: The sequence number for ordering. + """ + stream_key = self._get_stream_key(conversation_id) + await self._redis.xadd( + stream_key, + { + "text": text, + "sequence": str(sequence), + "timestamp": str(int(time.time() * 1000)), + } + ) + await self._redis.expire(stream_key, self._stream_ttl) + + async def write_completion( + self, + conversation_id: str, + sequence: int, + ) -> None: + """Write an end-of-stream marker to the Redis Stream. + + Args: + conversation_id: The conversation ID for this agent run. + sequence: The final sequence number. + """ + stream_key = self._get_stream_key(conversation_id) + await self._redis.xadd( + stream_key, + { + "text": "", + "sequence": str(sequence), + "timestamp": str(int(time.time() * 1000)), + "done": "true", + } + ) + await self._redis.expire(stream_key, self._stream_ttl) + + async def read_stream( + self, + conversation_id: str, + cursor: str | None = None, + ) -> AsyncIterator[StreamChunk]: + """Read entries from a Redis Stream with cursor-based pagination. + + This method polls the Redis Stream for new entries, yielding chunks as they + become available. Clients can resume from any point using the entry_id from + a previous chunk. + + Args: + conversation_id: The conversation ID to read from. + cursor: Optional cursor to resume from. If None, starts from beginning. + + Yields: + StreamChunk instances containing text content or status markers. 
+ """ + stream_key = self._get_stream_key(conversation_id) + start_id = cursor if cursor else "0-0" + + empty_read_count = 0 + has_seen_data = False + + while True: + try: + # Read up to 100 entries from the stream + entries = await self._redis.xread( + {stream_key: start_id}, + count=100, + block=None, + ) + + if not entries: + # No entries found + if not has_seen_data: + empty_read_count += 1 + if empty_read_count >= self.MAX_EMPTY_READS: + timeout_seconds = self.MAX_EMPTY_READS * self.POLL_INTERVAL_MS / 1000 + yield StreamChunk( + entry_id=start_id, + error=f"Stream not found or timed out after {timeout_seconds} seconds" + ) + return + + # Wait before polling again + await asyncio.sleep(self.POLL_INTERVAL_MS / 1000) + continue + + has_seen_data = True + + # Process entries from the stream + for _stream_name, stream_entries in entries: + for entry_id, entry_data in stream_entries: + start_id = entry_id.decode() if isinstance(entry_id, bytes) else entry_id + + # Decode entry data + text = entry_data.get(b"text", b"").decode() if b"text" in entry_data else None + done = entry_data.get(b"done", b"").decode() if b"done" in entry_data else None + error = entry_data.get(b"error", b"").decode() if b"error" in entry_data else None + + if error: + yield StreamChunk(entry_id=start_id, error=error) + return + + if done == "true": + yield StreamChunk(entry_id=start_id, is_done=True) + return + + if text: + yield StreamChunk(entry_id=start_id, text=text) + + except Exception as ex: + yield StreamChunk(entry_id=start_id, error=str(ex)) + return + + @staticmethod + def _get_stream_key(conversation_id: str) -> str: + """Generate the Redis key for a conversation's stream. + + Args: + conversation_id: The conversation ID. + + Returns: + The Redis stream key. + """ + return f"agent-stream:{conversation_id}" diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/requirements.txt b/python/samples/getting_started/durabletask/03_single_agent_streaming/requirements.txt new file mode 100644 index 0000000000..047a5d36f1 --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/requirements.txt @@ -0,0 +1,9 @@ +# Agent Framework packages (installing from local package until a package is published) +-e ../../../../ +-e ../../../../packages/durabletask + +# Azure authentication +azure-identity + +# Redis client +redis diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/sample.py b/python/samples/getting_started/durabletask/03_single_agent_streaming/sample.py new file mode 100644 index 0000000000..14de97caf8 --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/sample.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Single Agent Streaming Sample - Durable Task Integration (Combined Worker + Client) + +This sample demonstrates running both the worker and client in a single process +with reliable Redis-based streaming for agent responses. + +The worker is started first to register the TravelPlanner agent with Redis streaming +callback, then client operations are performed against the running worker. 
+ +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running (e.g., using Docker) +- Redis must be running (e.g., docker run -d --name redis -p 6379:6379 redis:latest) + +To run this sample: + python sample.py +""" + +import logging + +from dotenv import load_dotenv + +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker + +# Configure logging (must be after imports to override their basicConfig) +logging.basicConfig(level=logging.INFO, force=True) +logger = logging.getLogger(__name__) + +def main(): + """Main entry point - runs both worker and client in single process.""" + logger.debug("Starting Durable Task Agent Sample with Redis Streaming...") + + silent_handler = logging.NullHandler() + + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents and callbacks using helper function + setup_worker(dts_worker) + + # Start the worker + dts_worker.start() + logger.debug("Worker started and listening for requests...") + + # Create the client using helper function + agent_client = get_client(log_handler=silent_handler) + + try: + # Run client interactions using helper function + run_client(agent_client) + except Exception as e: + logger.exception(f"Error during agent interaction: {e}") + + logger.debug("Sample completed. Worker shutting down...") + + +if __name__ == "__main__": + load_dotenv() + main() diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/tools.py b/python/samples/getting_started/durabletask/03_single_agent_streaming/tools.py new file mode 100644 index 0000000000..6a71fdfa03 --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/tools.py @@ -0,0 +1,165 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Mock travel tools for demonstration purposes. + +In a real application, these would call actual weather and events APIs. +""" + +from typing import Annotated + + +def get_weather_forecast( + destination: Annotated[str, "The destination city or location"], + date: Annotated[str, 'The date for the forecast (e.g., "2025-01-15" or "next Monday")'], +) -> str: + """Get the weather forecast for a destination on a specific date. + + Use this to provide weather-aware recommendations in the itinerary. + + Args: + destination: The destination city or location. + date: The date for the forecast. + + Returns: + A weather forecast summary. 
+ """ + # Mock weather data based on destination for realistic responses + weather_by_region = { + "Tokyo": ("Partly cloudy with a chance of light rain", 58, 45), + "Paris": ("Overcast with occasional drizzle", 52, 41), + "New York": ("Clear and cold", 42, 28), + "London": ("Foggy morning, clearing in afternoon", 48, 38), + "Sydney": ("Sunny and warm", 82, 68), + "Rome": ("Sunny with light breeze", 62, 48), + "Barcelona": ("Partly sunny", 59, 47), + "Amsterdam": ("Cloudy with light rain", 46, 38), + "Dubai": ("Sunny and hot", 85, 72), + "Singapore": ("Tropical thunderstorms in afternoon", 88, 77), + "Bangkok": ("Hot and humid, afternoon showers", 91, 78), + "Los Angeles": ("Sunny and pleasant", 72, 55), + "San Francisco": ("Morning fog, afternoon sun", 62, 52), + "Seattle": ("Rainy with breaks", 48, 40), + "Miami": ("Warm and sunny", 78, 65), + "Honolulu": ("Tropical paradise weather", 82, 72), + } + + # Find a matching destination or use a default + forecast = ("Partly cloudy", 65, 50) + for city, weather in weather_by_region.items(): + if city.lower() in destination.lower(): + forecast = weather + break + + condition, high_f, low_f = forecast + high_c = (high_f - 32) * 5 // 9 + low_c = (low_f - 32) * 5 // 9 + + recommendation = _get_weather_recommendation(condition) + + return f"""Weather forecast for {destination} on {date}: +Conditions: {condition} +High: {high_f}°F ({high_c}°C) +Low: {low_f}°F ({low_c}°C) + +Recommendation: {recommendation}""" + + +def get_local_events( + destination: Annotated[str, "The destination city or location"], + date: Annotated[str, 'The date to search for events (e.g., "2025-01-15" or "next week")'], +) -> str: + """Get local events and activities happening at a destination around a specific date. + + Use this to suggest timely activities and experiences. + + Args: + destination: The destination city or location. + date: The date to search for events. + + Returns: + A list of local events and activities. 
+ """ + # Mock events data based on destination + events_by_city = { + "Tokyo": [ + "🎭 Kabuki Theater Performance at Kabukiza Theatre - Traditional Japanese drama", + "🌸 Winter Illuminations at Yoyogi Park - Spectacular light displays", + "🍜 Ramen Festival at Tokyo Station - Sample ramen from across Japan", + "🎮 Gaming Expo at Tokyo Big Sight - Latest video games and technology", + ], + "Paris": [ + "🎨 Impressionist Exhibition at Musée d'Orsay - Extended evening hours", + "🍷 Wine Tasting Tour in Le Marais - Local sommelier guided", + "🎵 Jazz Night at Le Caveau de la Huchette - Historic jazz club", + "🥐 French Pastry Workshop - Learn from master pâtissiers", + ], + "New York": [ + "🎭 Broadway Show: Hamilton - Limited engagement performances", + "🏀 Knicks vs Lakers at Madison Square Garden", + "🎨 Modern Art Exhibit at MoMA - New installations", + "🍕 Pizza Walking Tour of Brooklyn - Artisan pizzerias", + ], + "London": [ + "👑 Royal Collection Exhibition at Buckingham Palace", + "🎭 West End Musical: The Phantom of the Opera", + "🍺 Craft Beer Festival at Brick Lane", + "🎪 Winter Wonderland at Hyde Park - Rides and markets", + ], + "Sydney": [ + "🏄 Pro Surfing Competition at Bondi Beach", + "🎵 Opera at Sydney Opera House - La Bohème", + "🦘 Wildlife Night Safari at Taronga Zoo", + "🍽️ Harbor Dinner Cruise with fireworks", + ], + "Rome": [ + "🏛️ After-Hours Vatican Tour - Skip the crowds", + "🍝 Pasta Making Class in Trastevere", + "🎵 Classical Concert at Borghese Gallery", + "🍷 Wine Tasting in Roman Cellars", + ], + } + + # Find events for the destination or use generic events + events = [ + "🎭 Local theater performance", + "🍽️ Food and wine festival", + "🎨 Art gallery opening", + "🎵 Live music at local venues", + ] + + for city, city_events in events_by_city.items(): + if city.lower() in destination.lower(): + events = city_events + break + + event_list = "\n• ".join(events) + return f"""Local events in {destination} around {date}: + +• {event_list} + +💡 Tip: Book popular events in advance as they may sell out quickly!""" + + +def _get_weather_recommendation(condition: str) -> str: + """Get a recommendation based on weather conditions. + + Args: + condition: The weather condition description. + + Returns: + A recommendation string. + """ + condition_lower = condition.lower() + + if "rain" in condition_lower or "drizzle" in condition_lower: + return "Bring an umbrella and waterproof jacket. Consider indoor activities for backup." + elif "fog" in condition_lower: + return "Morning visibility may be limited. Plan outdoor sightseeing for afternoon." + elif "cold" in condition_lower: + return "Layer up with warm clothing. Hot drinks and cozy cafés recommended." + elif "hot" in condition_lower or "warm" in condition_lower: + return "Stay hydrated and use sunscreen. Plan strenuous activities for cooler morning hours." + elif "thunder" in condition_lower or "storm" in condition_lower: + return "Keep an eye on weather updates. Have indoor alternatives ready." + else: + return "Pleasant conditions expected. Great day for outdoor exploration!" diff --git a/python/samples/getting_started/durabletask/03_single_agent_streaming/worker.py b/python/samples/getting_started/durabletask/03_single_agent_streaming/worker.py new file mode 100644 index 0000000000..93af682efc --- /dev/null +++ b/python/samples/getting_started/durabletask/03_single_agent_streaming/worker.py @@ -0,0 +1,251 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +"""Worker process for hosting a TravelPlanner agent with reliable Redis streaming. + +This worker registers the TravelPlanner agent with the Durable Task Scheduler +and uses RedisStreamCallback to persist streaming responses to Redis for reliable delivery. + +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Start a Durable Task Scheduler (e.g., using Docker) +- Start Redis (e.g., docker run -d --name redis -p 6379:6379 redis:latest) +""" + +import asyncio +import logging +import os +from datetime import timedelta + +import redis.asyncio as aioredis +from agent_framework import AgentRunResponseUpdate +from agent_framework.azure import AzureOpenAIChatClient +from agent_framework_durabletask import AgentCallbackContext, AgentResponseCallbackProtocol, DurableAIAgentWorker +from azure.identity import AzureCliCredential, DefaultAzureCredential +from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker + +from redis_stream_response_handler import RedisStreamResponseHandler +from tools import get_local_events, get_weather_forecast + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Configuration +REDIS_CONNECTION_STRING = os.environ.get("REDIS_CONNECTION_STRING", "redis://localhost:6379") +REDIS_STREAM_TTL_MINUTES = int(os.environ.get("REDIS_STREAM_TTL_MINUTES", "10")) + + +async def get_stream_handler() -> RedisStreamResponseHandler: + """Create a new Redis stream handler for each request. + + This avoids event loop conflicts by creating a fresh Redis client + in the current event loop context. + """ + # Create a new Redis client in the current event loop + redis_client = aioredis.from_url( # type: ignore[reportUnknownMemberType] + REDIS_CONNECTION_STRING, + encoding="utf-8", + decode_responses=False, + ) + + return RedisStreamResponseHandler( + redis_client=redis_client, + stream_ttl=timedelta(minutes=REDIS_STREAM_TTL_MINUTES), + ) + + +class RedisStreamCallback(AgentResponseCallbackProtocol): + """Callback that writes streaming updates to Redis Streams for reliable delivery. + + This enables clients to disconnect and reconnect without losing messages. + """ + + def __init__(self) -> None: + self._sequence_numbers: dict[str, int] = {} # Track sequence per thread + + async def on_streaming_response_update( + self, + update: AgentRunResponseUpdate, + context: AgentCallbackContext, + ) -> None: + """Write streaming update to Redis Stream. + + Args: + update: The streaming response update chunk. + context: The callback context with thread_id, agent_name, etc. 
+ """ + thread_id = context.thread_id + if not thread_id: + logger.warning("No thread_id available for streaming update") + return + + if not update.text: + return + + text = update.text + + # Get or initialize sequence number for this thread + if thread_id not in self._sequence_numbers: + self._sequence_numbers[thread_id] = 0 + + sequence = self._sequence_numbers[thread_id] + + try: + # Use context manager to ensure Redis client is properly closed + async with await get_stream_handler() as stream_handler: + # Write chunk to Redis Stream using public API + await stream_handler.write_chunk(thread_id, text, sequence) + + self._sequence_numbers[thread_id] += 1 + + logger.debug( + "[%s][%s] Wrote chunk to Redis: seq=%d, text=%s", + context.agent_name, + thread_id[:8], + sequence, + text, + ) + except Exception as ex: + logger.error(f"Error writing to Redis stream: {ex}", exc_info=True) + + async def on_agent_response(self, response: object, context: AgentCallbackContext) -> None: + """Write end-of-stream marker when agent completes. + + Args: + response: The final agent response. + context: The callback context. + """ + thread_id = context.thread_id + if not thread_id: + return + + sequence = self._sequence_numbers.get(thread_id, 0) + + try: + # Use context manager to ensure Redis client is properly closed + async with await get_stream_handler() as stream_handler: + # Write end-of-stream marker using public API + await stream_handler.write_completion(thread_id, sequence) + + logger.info( + "[%s][%s] Agent completed, wrote end-of-stream marker", + context.agent_name, + thread_id[:8], + ) + + # Clean up sequence tracker + self._sequence_numbers.pop(thread_id, None) + except Exception as ex: + logger.error(f"Error writing end-of-stream marker: {ex}", exc_info=True) + + +def create_travel_agent(): + """Create the TravelPlanner agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured TravelPlanner agent with travel planning tools. + """ + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name="TravelPlanner", + instructions="""You are an expert travel planner who creates detailed, personalized travel itineraries. +When asked to plan a trip, you should: +1. Create a comprehensive day-by-day itinerary +2. Include specific recommendations for activities, restaurants, and attractions +3. Provide practical tips for each destination +4. Consider weather and local events when making recommendations +5. Include estimated times and logistics between activities + +Always use the available tools to get current weather forecasts and local events +for the destination to make your recommendations more relevant and timely. + +Format your response with clear headings for each day and include emoji icons +to make the itinerary easy to scan and visually appealing.""", + tools=[get_weather_forecast, get_local_events], + ) + + +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. 
+ + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional log handler for worker logging + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with the TravelPlanner agent and Redis streaming callback. + + Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agent and callback registered + """ + # Create the Redis streaming callback + redis_callback = RedisStreamCallback() + + # Wrap it with the agent worker + agent_worker = DurableAIAgentWorker(worker, callback=redis_callback) + + # Create and register the TravelPlanner agent + logger.debug("Creating and registering TravelPlanner agent...") + travel_agent = create_travel_agent() + agent_worker.add_agent(travel_agent) + + logger.debug(f"✓ Registered agent: {travel_agent.name}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Agent Worker with Redis Streaming...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agent and callback + setup_worker(worker) + + # Start the worker + logger.debug("Worker started and listening for requests...") + worker.start() + + try: + # Keep the worker running + while True: + await asyncio.sleep(1) + except KeyboardInterrupt: + logger.debug("Worker shutting down...") + finally: + worker.stop() + logger.debug("Worker stopped") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/README.md b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/README.md index 090fdec3b9..3a5605b3dd 100644 --- a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/README.md +++ b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/README.md @@ -1,4 +1,4 @@ -# Single Agent Orchestration Chaining Sample +# Single Agent Orchestration Chaining This sample demonstrates how to chain multiple invocations of the same agent using a durable orchestration while preserving conversation state between runs. @@ -15,32 +15,30 @@ See the [README.md](../README.md) file in the parent directory for more informat ## Running the Sample -With the environment setup, you can run the sample using one of two approaches: +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: -### Option 1: Combined Worker + Client (Quick Start) +**Option 1: Combined (Recommended for Testing)** ```bash cd samples/getting_started/durabletask/04_single_agent_orchestration_chaining python sample.py ``` -This runs both worker and client in a single process. 
+**Option 2: Separate Processes** -### Option 2: Separate Worker and Client - -**Start the worker in one terminal:** +Start the worker in one terminal: ```bash python worker.py ``` -**In a new terminal, run the client:** +In a new terminal, run the client: ```bash python client.py ``` -The orchestration will execute the writer agent twice sequentially, and you'll see output like: +The orchestration will execute the writer agent twice sequentially: ``` [Orchestration] Starting single agent chaining... @@ -62,11 +60,9 @@ Each small step forward brings you closer to mastery and growth. You can view the state of the orchestration in the Durable Task Scheduler dashboard: 1. Open your browser and navigate to `http://localhost:8082` -2. In the dashboard, you can view the orchestration instance, including: +2. In the dashboard, you can view: - The sequential execution of both agent runs - The conversation thread shared between runs - Input and output at each step - Overall orchestration state and history -The orchestration maintains the conversation context across both agent invocations, demonstrating how durable orchestrations can coordinate multi-step agent workflows. - diff --git a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/client.py b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/client.py index 1b5331e47e..6f737eefc0 100644 --- a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/client.py +++ b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/client.py @@ -24,80 +24,93 @@ logger = logging.getLogger(__name__) -async def main() -> None: - """Main entry point for the client application.""" - logger.info("Starting Durable Task Single Agent Chaining Orchestration Client...") +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerClient: + """Create a configured DurableTaskSchedulerClient. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") + return DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() +def run_client(client: DurableTaskSchedulerClient) -> None: + """Run client to start and monitor the orchestration. 
- # Create a client using Azure Managed Durable Task - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", - taskhub=taskhub_name, - token_credential=credential + Args: + client: The DurableTaskSchedulerClient instance + """ + logger.debug("Starting single agent chaining orchestration...") + + # Start the orchestration + instance_id = client.schedule_new_orchestration( # type: ignore + orchestrator="single_agent_chaining_orchestration", + input="", ) - logger.info("Starting single agent chaining orchestration...") - logger.info("") + logger.info(f"Orchestration started with instance ID: {instance_id}") + logger.debug("Waiting for orchestration to complete...") - try: - # Start the orchestration - instance_id = client.schedule_new_orchestration( - orchestrator="single_agent_chaining_orchestration", - input="", - ) - - logger.info(f"Orchestration started with instance ID: {instance_id}") - logger.info("Waiting for orchestration to complete...") - logger.info("") + # Retrieve the final state + metadata = client.wait_for_orchestration_completion( + instance_id=instance_id, + timeout=300 + ) + + if metadata and metadata.runtime_status.name == "COMPLETED": + result = metadata.serialized_output - # Retrieve the final state - metadata = client.wait_for_orchestration_completion( - instance_id=instance_id, - timeout=300 - ) + logger.debug("Orchestration completed successfully!") - if metadata and metadata.runtime_status.name == "COMPLETED": - result = metadata.serialized_output - - logger.info("=" * 80) - logger.info("Orchestration completed successfully!") - logger.info("=" * 80) - logger.info("") - logger.info("Results:") - logger.info("") - - # Parse and display the result - if result: - final_text = json.loads(result) - logger.info("Final refined sentence:") - logger.info(f" {final_text}") - logger.info("") - - logger.info("=" * 80) - - elif metadata: - logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") - if metadata.serialized_output: - logger.error(f"Output: {metadata.serialized_output}") - else: - logger.error("Orchestration did not complete within the timeout period") + # Parse and display the result + if result: + final_text = json.loads(result) + logger.info("Final refined sentence: %s \n", final_text) + elif metadata: + logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") + if metadata.serialized_output: + logger.error(f"Output: {metadata.serialized_output}") + else: + logger.error("Orchestration did not complete within the timeout period") + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task Single Agent Chaining Orchestration Client...") + + # Create client using helper function + client = get_client() + + try: + run_client(client) except Exception as e: logger.exception(f"Error during orchestration: {e}") finally: - logger.info("") - logger.info("Client shutting down") + logger.debug("") + logger.debug("Client shutting down") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/sample.py b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/sample.py index 1f1ee81deb..208c223f5e 100644 --- a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/sample.py +++ b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/sample.py @@ -19,235 +19,50 @@ 
python sample.py """ -import asyncio -import json import logging -import os -from collections.abc import Generator -from typing import Any -from agent_framework import AgentRunResponse -from agent_framework.azure import AzureOpenAIChatClient -from agent_framework_durabletask import DurableAIAgentOrchestrationContext, DurableAIAgentWorker -from azure.identity import AzureCliCredential, DefaultAzureCredential from dotenv import load_dotenv -from durabletask.task import OrchestrationContext, Task -from durabletask.azuremanaged.client import DurableTaskSchedulerClient -from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker + +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker # Configure logging -logging.basicConfig(level=logging.INFO) +logging.basicConfig(level=logging.INFO, force=True) logger = logging.getLogger(__name__) -# Agent name -WRITER_AGENT_NAME = "WriterAgent" - - -def create_writer_agent(): - """Create the Writer agent using Azure OpenAI. - - This agent refines short pieces of text, enhancing initial sentences - and polishing improved versions further. - - Returns: - AgentProtocol: The configured Writer agent - """ - instructions = ( - "You refine short pieces of text. When given an initial sentence you enhance it;\n" - "when given an improved sentence you polish it further." - ) - - return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( - name=WRITER_AGENT_NAME, - instructions=instructions, - ) - - -def single_agent_chaining_orchestration( - context: OrchestrationContext, _: str -) -> Generator[Task[Any], Any, str]: - """Orchestration that runs the writer agent twice on the same thread. - - This demonstrates chaining behavior where the output of the first agent run - becomes part of the input for the second run, all while maintaining the - conversation context through a shared thread. 
- - Args: - context: The orchestration context - _: Input parameter (unused) - - Returns: - str: The final refined text from the second agent run - """ - logger.info("[Orchestration] Starting single agent chaining...") - - # Wrap the orchestration context to access agents - agent_context = DurableAIAgentOrchestrationContext(context) - - # Get the writer agent using the agent context - writer = agent_context.get_agent(WRITER_AGENT_NAME) - - # Create a new thread for the conversation - this will be shared across both runs - writer_thread = writer.get_new_thread() - - logger.info(f"[Orchestration] Created thread: {writer_thread.session_id}") - - # First run: Generate an initial inspirational sentence - logger.info("[Orchestration] First agent run: Generating initial sentence...") - initial_response: AgentRunResponse = yield writer.run( - messages="Write a concise inspirational sentence about learning.", - thread=writer_thread, - ) - logger.info(f"[Orchestration] Initial response: {initial_response.text}") - - # Second run: Refine the initial response on the same thread - improved_prompt = ( - f"Improve this further while keeping it under 25 words: " - f"{initial_response.text}" - ) - - logger.info("[Orchestration] Second agent run: Refining the sentence...") - refined_response: AgentRunResponse = yield writer.run( - messages=improved_prompt, - thread=writer_thread, - ) - - logger.info(f"[Orchestration] Refined response: {refined_response.text}") - - logger.info("[Orchestration] Chaining complete") - return refined_response.text - - -async def run_client( - endpoint: str, taskhub_name: str, credential: DefaultAzureCredential | None -): - """Run the client to start and monitor the orchestration. - - Args: - endpoint: The durable task scheduler endpoint - taskhub_name: The task hub name - credential: The credential for authentication - """ - logger.info("") - logger.info("=" * 80) - logger.info("CLIENT: Starting orchestration...") - logger.info("=" * 80) - logger.info("") - - # Create a client - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", - taskhub=taskhub_name, - token_credential=credential - ) - - try: - # Start the orchestration - instance_id = client.schedule_new_orchestration( - single_agent_chaining_orchestration - ) - - logger.info(f"Orchestration started with instance ID: {instance_id}") - logger.info("Waiting for orchestration to complete...") - logger.info("") - - # Retrieve the final state - metadata = client.wait_for_orchestration_completion( - instance_id=instance_id, - timeout=300 - ) - - if metadata and metadata.runtime_status.name == "COMPLETED": - result = metadata.serialized_output - - logger.info("") - logger.info("=" * 80) - logger.info("ORCHESTRATION COMPLETED SUCCESSFULLY!") - logger.info("=" * 80) - logger.info("") - - # Parse and display the result - if result: - final_text = json.loads(result) - logger.info("Final refined sentence:") - logger.info(f" {final_text}") - else: - logger.warning("No output returned from orchestration") - - elif metadata: - logger.error(f"Orchestration did not complete successfully: {metadata.runtime_status.name}") - if metadata.serialized_output: - logger.error(f"Output: {metadata.serialized_output}") - else: - logger.error("Could not retrieve orchestration metadata") - - except Exception as e: - logger.exception(f"Error during orchestration: {e}") - - logger.info("") - logger.info("Client shutting down") - def main(): """Main entry point - runs both worker and client in 
single process.""" - logger.info("Starting Single Agent Orchestration Chaining Sample...") - logger.info("") - - # Load environment variables - load_dotenv() - - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() - secure_channel = endpoint != "http://localhost:8080" + logger.debug("Starting Single Agent Orchestration Chaining Sample...") - # Create and start the worker using a context manager - with DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=secure_channel, - taskhub=taskhub_name, - token_credential=credential - ) as worker: - - # Wrap with the agent worker - agent_worker = DurableAIAgentWorker(worker) - - # Create and register the Writer agent - logger.info("Creating and registering Writer agent...") - writer_agent = create_writer_agent() - agent_worker.add_agent(writer_agent) + silent_handler = logging.NullHandler() + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents and orchestrations using helper function + setup_worker(dts_worker) - logger.info(f"✓ Registered agent: {writer_agent.name}") - logger.info(f" Entity name: dafx-{writer_agent.name}") + # Start the worker + dts_worker.start() + logger.debug("Worker started and listening for requests...") - # Register the orchestration function - logger.info("Registering orchestration function...") - worker.add_orchestrator(single_agent_chaining_orchestration) - logger.info("✓ Registered orchestration: single_agent_chaining_orchestration") - logger.info("") + # Create the client using helper function + client = get_client(log_handler=silent_handler) - # Start the worker - worker.start() - logger.info("Worker started and listening for requests...") - logger.info("") + logger.debug("CLIENT: Starting orchestration...") # Run the client in the same process try: - asyncio.run(run_client(endpoint, taskhub_name, credential)) + run_client(client) except KeyboardInterrupt: - logger.info("Sample interrupted by user") + logger.debug("Sample interrupted by user") + except Exception as e: + logger.exception(f"Error during orchestration: {e}") finally: - logger.info("Worker stopping...") + logger.debug("Worker stopping...") - logger.info("Sample completed") + logger.debug("") + logger.debug("Sample completed") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/worker.py b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/worker.py index b033bca155..39cda4596f 100644 --- a/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/worker.py +++ b/python/samples/getting_started/durabletask/04_single_agent_orchestration_chaining/worker.py @@ -14,7 +14,6 @@ from collections.abc import Generator import logging import os -from typing import Any from agent_framework import AgentRunResponse from agent_framework.azure import AzureOpenAIChatClient @@ -51,9 +50,18 @@ def create_writer_agent(): ) +def get_orchestration(): + """Get the orchestration function for this sample. 
+ + Returns: + The orchestration function to register with the worker + """ + return single_agent_chaining_orchestration + + def single_agent_chaining_orchestration( context: OrchestrationContext, _: str -) -> Generator[Task[Any], Any, str]: +) -> Generator[Task[AgentRunResponse], AgentRunResponse, str]: """Orchestration that runs the writer agent twice on the same thread. This demonstrates chaining behavior where the output of the first agent run @@ -64,10 +72,13 @@ def single_agent_chaining_orchestration( context: The orchestration context _: Input parameter (unused) + Yields: + Task[AgentRunResponse]: Tasks that resolve to AgentRunResponse + Returns: str: The final refined text from the second agent run """ - logger.info("[Orchestration] Starting single agent chaining...") + logger.debug("[Orchestration] Starting single agent chaining...") # Wrap the orchestration context to access agents agent_context = DurableAIAgentOrchestrationContext(context) @@ -78,12 +89,13 @@ def single_agent_chaining_orchestration( # Create a new thread for the conversation - this will be shared across both runs writer_thread = writer.get_new_thread() - logger.info(f"[Orchestration] Created thread: {writer_thread.session_id}") + logger.debug(f"[Orchestration] Created thread: {writer_thread.session_id}") + prompt = "Write a concise inspirational sentence about learning." # First run: Generate an initial inspirational sentence - logger.info("[Orchestration] First agent run: Generating initial sentence...") - initial_response: AgentRunResponse = yield writer.run( - messages="Write a concise inspirational sentence about learning.", + logger.info("[Orchestration] First agent run: Generating initial sentence about: %s", prompt) + initial_response = yield writer.run( + messages=prompt, thread=writer_thread, ) logger.info(f"[Orchestration] Initial response: {initial_response.text}") @@ -94,61 +106,89 @@ def single_agent_chaining_orchestration( f"{initial_response.text}" ) - logger.info("[Orchestration] Second agent run: Refining the sentence...") - refined_response: AgentRunResponse = yield writer.run( + logger.info("[Orchestration] Second agent run: Refining the sentence: %s", improved_prompt) + refined_response = yield writer.run( messages=improved_prompt, thread=writer_thread, ) logger.info(f"[Orchestration] Refined response: {refined_response.text}") - logger.info("[Orchestration] Chaining complete") + logger.debug("[Orchestration] Chaining complete") return refined_response.text -async def main(): - """Main entry point for the worker process.""" - logger.info("Starting Durable Task Single Agent Chaining Worker with Orchestration...") +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. 
- # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") - # Create a worker using Azure Managed Durable Task - worker = DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", taskhub=taskhub_name, - token_credential=credential + token_credential=credential, + log_handler=log_handler ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with agents and orchestrations registered. + Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents and orchestrations registered + """ # Wrap it with the agent worker agent_worker = DurableAIAgentWorker(worker) # Create and register the Writer agent - logger.info("Creating and registering Writer agent...") + logger.debug("Creating and registering Writer agent...") writer_agent = create_writer_agent() agent_worker.add_agent(writer_agent) - logger.info(f"✓ Registered agent: {writer_agent.name}") - logger.info(f" Entity name: dafx-{writer_agent.name}") - logger.info("") + logger.debug(f"✓ Registered agent: {writer_agent.name}") # Register the orchestration function - logger.info("Registering orchestration function...") - worker.add_orchestrator(single_agent_chaining_orchestration) - logger.info(f"✓ Registered orchestration: {single_agent_chaining_orchestration.__name__}") - logger.info("") + logger.debug("Registering orchestration function...") + worker.add_orchestrator(single_agent_chaining_orchestration) # type: ignore + logger.debug(f"✓ Registered orchestration: {single_agent_chaining_orchestration.__name__}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Single Agent Chaining Worker with Orchestration...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents and orchestrations + setup_worker(worker) - logger.info("Worker is ready and listening for requests...") - logger.info("Press Ctrl+C to stop.") - logger.info("") + logger.debug("Worker is ready and listening for requests...") + logger.debug("Press Ctrl+C to stop.") try: # Start the worker (this blocks until stopped) @@ -158,9 +198,9 @@ async def main(): while True: await asyncio.sleep(1) except KeyboardInterrupt: - logger.info("Worker shutdown initiated") + logger.debug("Worker shutdown initiated") - logger.info("Worker 
stopped") + logger.debug("Worker stopped") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/README.md b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/README.md index 89efdb5e8d..0edf244d78 100644 --- a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/README.md +++ b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/README.md @@ -1,4 +1,4 @@ -# Multi-Agent Orchestration with Concurrency Sample +# Multi-Agent Orchestration with Concurrency This sample demonstrates how to host multiple agents and run them concurrently using a durable orchestration, aggregating their responses into a single result. @@ -15,38 +15,37 @@ See the [README.md](../README.md) file in the parent directory for more informat ## Running the Sample -With the environment setup, you can run the sample using one of two approaches: +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: -### Option 1: Combined Worker + Client (Quick Start) +**Option 1: Combined (Recommended for Testing)** ```bash cd samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency python sample.py ``` -This runs both worker and client in a single process. +**Option 2: Separate Processes** -### Option 2: Separate Worker and Client - -**Start the worker in one terminal:** +Start the worker in one terminal: ```bash python worker.py ``` -**In a new terminal, run the client:** +In a new terminal, run the client: ```bash python client.py ``` -The orchestration will execute both agents concurrently, and you'll see output like: +The orchestration will execute both agents concurrently: ``` Prompt: What is temperature? Starting multi-agent concurrent orchestration... Orchestration started with instance ID: abc123... +⚡ Running PhysicistAgent and ChemistAgent in parallel... Orchestration status: COMPLETED Results: @@ -63,13 +62,10 @@ Chemist's response: You can view the state of the orchestration in the Durable Task Scheduler dashboard: 1. Open your browser and navigate to `http://localhost:8082` -2. In the dashboard, you can view the orchestration instance, including: - - The concurrent execution of both agents (Physicist and Chemist) +2. In the dashboard, you can view: + - The concurrent execution of both agents (PhysicistAgent and ChemistAgent) - Separate conversation threads for each agent - Parallel task execution and completion timing - Aggregated results from both agents - - Overall orchestration state and history - -The orchestration demonstrates how multiple agents can be executed in parallel, with results collected and aggregated once all agents complete. 
diff --git a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/client.py b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/client.py index 2e517a8b6a..2b1fea8b57 100644 --- a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/client.py +++ b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/client.py @@ -24,90 +24,90 @@ logger = logging.getLogger(__name__) -async def main() -> None: - """Main entry point for the client application.""" - logger.info("Starting Durable Task Multi-Agent Orchestration Client...") +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerClient: + """Create a configured DurableTaskSchedulerClient. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") + return DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() +def run_client(client: DurableTaskSchedulerClient, prompt: str = "What is temperature?") -> None: + """Run client to start and monitor the orchestration. - # Create a client using Azure Managed Durable Task - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", - taskhub=taskhub_name, - token_credential=credential + Args: + client: The DurableTaskSchedulerClient instance + prompt: The prompt to send to both agents + """ + # Start the orchestration with the prompt as input + instance_id = client.schedule_new_orchestration( # type: ignore + orchestrator="multi_agent_concurrent_orchestration", + input=prompt, ) - # Define the prompt to send to both agents - prompt = "What is temperature?" 
+ logger.info(f"Orchestration started with instance ID: {instance_id}") + logger.debug("Waiting for orchestration to complete...") - logger.info(f"Prompt: {prompt}") - logger.info("") - logger.info("Starting multi-agent concurrent orchestration...") + # Retrieve the final state + metadata = client.wait_for_orchestration_completion( + instance_id=instance_id, + ) - try: - # Start the orchestration with the prompt as input - instance_id = client.schedule_new_orchestration( - orchestrator="multi_agent_concurrent_orchestration", - input=prompt, - ) + if metadata and metadata.runtime_status.name == "COMPLETED": + result = metadata.serialized_output - logger.info(f"Orchestration started with instance ID: {instance_id}") - logger.info("Waiting for orchestration to complete...") - logger.info("") - - # Retrieve the final state - metadata = client.wait_for_orchestration_completion( - instance_id=instance_id, - ) - - if metadata and metadata.runtime_status.name == "COMPLETED": - result = metadata.serialized_output - - logger.info("=" * 80) - logger.info("Orchestration completed successfully!") - logger.info("=" * 80) - logger.info("") - logger.info(f"Prompt: {prompt}") - logger.info("") - logger.info("Results:") - logger.info("") - - # Parse and display the result - if result: - result_dict = json.loads(result) - - logger.info("Physicist's response:") - logger.info(f" {result_dict.get('physicist', 'N/A')}") - logger.info("") + logger.debug("Orchestration completed successfully!") - logger.info("Chemist's response:") - logger.info(f" {result_dict.get('chemist', 'N/A')}") - logger.info("") - - logger.info("=" * 80) - - elif metadata: - logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") - if metadata.serialized_output: - logger.error(f"Output: {metadata.serialized_output}") - else: - logger.error("Orchestration did not complete within the timeout period") + # Parse and display the result + if result: + result_json = json.loads(result) if isinstance(result, str) else result + logger.info("Orchestration Results:\n%s", json.dumps(result_json, indent=2)) + elif metadata: + logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") + if metadata.serialized_output: + logger.error(f"Output: {metadata.serialized_output}") + else: + logger.error("Orchestration did not complete within the timeout period") + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task Multi-Agent Orchestration Client...") + + # Create client using helper function + client = get_client() + + try: + run_client(client) except Exception as e: logger.exception(f"Error during orchestration: {e}") finally: - logger.info("") - logger.info("Client shutting down") + logger.debug("Client shutting down") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/sample.py b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/sample.py index 1f6723695e..ca80aa043e 100644 --- a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/sample.py +++ b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/sample.py @@ -16,249 +16,47 @@ python sample.py """ -import asyncio -import json import logging -import os -from collections.abc import Generator -from typing import Any -from agent_framework import AgentRunResponse -from agent_framework.azure import AzureOpenAIChatClient -from 
agent_framework_durabletask import DurableAIAgentOrchestrationContext, DurableAIAgentWorker -from azure.identity import AzureCliCredential, DefaultAzureCredential from dotenv import load_dotenv -from durabletask.task import OrchestrationContext, when_all, Task -from durabletask.azuremanaged.client import DurableTaskSchedulerClient -from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker + +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker # Configure logging -logging.basicConfig(level=logging.INFO) +logging.basicConfig(level=logging.INFO, force=True) logger = logging.getLogger(__name__) -# Agent names -PHYSICIST_AGENT_NAME = "PhysicistAgent" -CHEMIST_AGENT_NAME = "ChemistAgent" - - -def create_physicist_agent(): - """Create the Physicist agent using Azure OpenAI. - - Returns: - AgentProtocol: The configured Physicist agent - """ - return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( - name=PHYSICIST_AGENT_NAME, - instructions="You are an expert in physics. You answer questions from a physics perspective.", - ) - - -def create_chemist_agent(): - """Create the Chemist agent using Azure OpenAI. - - Returns: - AgentProtocol: The configured Chemist agent - """ - return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( - name=CHEMIST_AGENT_NAME, - instructions="You are an expert in chemistry. You answer questions from a chemistry perspective.", - ) - - -def multi_agent_concurrent_orchestration(context: OrchestrationContext, prompt: str) -> Generator[Task[Any], Any, dict[str, str]]: - """Orchestration that runs both agents in parallel and aggregates results. - - Uses DurableAIAgentOrchestrationContext to wrap the orchestration context and - access agents via the OrchestrationAgentExecutor. 
- - Args: - context: The orchestration context - - Returns: - dict: Dictionary with 'physicist' and 'chemist' response texts - """ - logger.info(f"[Orchestration] Starting concurrent execution for prompt: {prompt}") - - # Wrap the orchestration context to access agents - agent_context = DurableAIAgentOrchestrationContext(context) - - # Get agents using the agent context (returns DurableAIAgent proxies) - physicist = agent_context.get_agent(PHYSICIST_AGENT_NAME) - chemist = agent_context.get_agent(CHEMIST_AGENT_NAME) - - # Create separate threads for each agent - physicist_thread = physicist.get_new_thread() - chemist_thread = chemist.get_new_thread() - - logger.info(f"[Orchestration] Created threads - Physicist: {physicist_thread.session_id}, Chemist: {chemist_thread.session_id}") - - # Create tasks from agent.run() calls - these return DurableAgentTask instances - physicist_task = physicist.run(messages=str(prompt), thread=physicist_thread) - chemist_task = chemist.run(messages=str(prompt), thread=chemist_thread) - - logger.info("[Orchestration] Created agent tasks, executing concurrently...") - - # Execute both tasks concurrently using task.when_all - # The DurableAgentTask instances wrap the underlying entity calls - task_results = yield when_all([physicist_task, chemist_task]) - - logger.info("[Orchestration] Both agents completed") - - # Extract results from the tasks - DurableAgentTask yields AgentRunResponse - physicist_result: AgentRunResponse = task_results[0] - chemist_result: AgentRunResponse = task_results[1] - - result = { - "physicist": physicist_result.text, - "chemist": chemist_result.text, - } - - logger.info(f"[Orchestration] Aggregated results ready") - return result - - -async def run_client(endpoint: str, taskhub_name: str, credential: DefaultAzureCredential | None, prompt: str): - """Run the client to start and monitor the orchestration. 
- - Args: - endpoint: The durable task scheduler endpoint - taskhub_name: The task hub name - credential: The credential for authentication - prompt: The prompt to send to both agents - """ - logger.info("") - logger.info("=" * 80) - logger.info("CLIENT: Starting orchestration...") - logger.info("=" * 80) - - # Create a client - client = DurableTaskSchedulerClient( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", - taskhub=taskhub_name, - token_credential=credential - ) - - logger.info(f"Prompt: {prompt}") - logger.info("") - - try: - # Start the orchestration with the prompt as input - instance_id = client.schedule_new_orchestration( - multi_agent_concurrent_orchestration, - input=prompt, - ) - - logger.info(f"Orchestration started with instance ID: {instance_id}") - logger.info("Waiting for orchestration to complete...") - logger.info("") - - # Retrieve the final state - metadata = client.wait_for_orchestration_completion( - instance_id=instance_id - ) - - if metadata and metadata.runtime_status.name == "COMPLETED": - result = metadata.serialized_output - - logger.info("") - logger.info("=" * 80) - logger.info("ORCHESTRATION COMPLETED SUCCESSFULLY!") - logger.info("=" * 80) - logger.info("") - logger.info(f"Prompt: {prompt}") - logger.info("") - logger.info("Results:") - logger.info("") - - # Parse and display the result - if result: - result_dict = json.loads(result) - - logger.info("Physicist's response:") - logger.info(f" {result_dict.get('physicist', 'N/A')}") - logger.info("") - - logger.info("Chemist's response:") - logger.info(f" {result_dict.get('chemist', 'N/A')}") - logger.info("") - - logger.info("=" * 80) - - elif metadata: - logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") - if metadata.serialized_output: - logger.error(f"Output: {metadata.serialized_output}") - else: - logger.error("Orchestration did not complete within the timeout period") - - except Exception as e: - logger.exception(f"Error during orchestration: {e}") - def main(): """Main entry point - runs both worker and client in single process.""" - logger.info("Starting Durable Task Multi-Agent Orchestration Sample (Combined Worker + Client)...") + logger.debug("Starting Durable Task Multi-Agent Orchestration Sample (Combined Worker + Client)...") - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - logger.info("") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() - secure_channel = endpoint != "http://localhost:8080" - - # Create and start the worker using a context manager - with DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=secure_channel, - taskhub=taskhub_name, - token_credential=credential - ) as worker: - - # Wrap with the agent worker - agent_worker = DurableAIAgentWorker(worker) - - # Create and register both agents - logger.info("Creating and registering agents...") - physicist_agent = create_physicist_agent() - chemist_agent = create_chemist_agent() - - agent_worker.add_agent(physicist_agent) - agent_worker.add_agent(chemist_agent) - - logger.info(f"✓ Registered agent: {physicist_agent.name}") - logger.info(f" Entity name: dafx-{physicist_agent.name}") - logger.info(f"✓ Registered 
agent: {chemist_agent.name}") - logger.info(f" Entity name: dafx-{chemist_agent.name}") - logger.info("") - - # Register the orchestration function - logger.info("Registering orchestration function...") - worker.add_orchestrator(multi_agent_concurrent_orchestration) - logger.info(f"✓ Registered orchestration: {multi_agent_concurrent_orchestration.__name__}") - logger.info("") + silent_handler = logging.NullHandler() + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents and orchestrations using helper function + setup_worker(dts_worker) # Start the worker - worker.start() - logger.info("Worker started and listening for requests...") + dts_worker.start() + logger.debug("Worker started and listening for requests...") + + # Create the client using helper function + client = get_client(log_handler=silent_handler) # Define the prompt prompt = "What is temperature?" - + logger.debug("CLIENT: Starting orchestration...") + try: # Run the client to start the orchestration - asyncio.run(run_client(endpoint, taskhub_name, credential, prompt)) - + run_client(client, prompt) except Exception as e: logger.exception(f"Error during sample execution: {e}") - logger.info("") - logger.info("Sample completed. Worker shutting down...") + logger.debug("Sample completed. Worker shutting down...") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/worker.py b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/worker.py index bb35b5a8d4..9c4dccd18f 100644 --- a/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/worker.py +++ b/python/samples/getting_started/durabletask/05_multi_agent_orchestration_concurrency/worker.py @@ -82,19 +82,19 @@ def multi_agent_concurrent_orchestration(context: OrchestrationContext, prompt: physicist_thread = physicist.get_new_thread() chemist_thread = chemist.get_new_thread() - logger.info(f"[Orchestration] Created threads - Physicist: {physicist_thread.session_id}, Chemist: {chemist_thread.session_id}") + logger.debug(f"[Orchestration] Created threads - Physicist: {physicist_thread.session_id}, Chemist: {chemist_thread.session_id}") # Create tasks from agent.run() calls - these return DurableAgentTask instances physicist_task = physicist.run(messages=str(prompt), thread=physicist_thread) chemist_task = chemist.run(messages=str(prompt), thread=chemist_thread) - logger.info("[Orchestration] Created agent tasks, executing concurrently...") + logger.debug("[Orchestration] Created agent tasks, executing concurrently...") # Execute both tasks concurrently using when_all # The DurableAgentTask instances wrap the underlying entity calls task_results = yield when_all([physicist_task, chemist_task]) - logger.info("[Orchestration] Both agents completed") + logger.debug("[Orchestration] Both agents completed") # Extract results from the tasks - DurableAgentTask yields AgentRunResponse physicist_result: AgentRunResponse = task_results[0] @@ -105,58 +105,84 @@ def multi_agent_concurrent_orchestration(context: OrchestrationContext, prompt: "chemist": chemist_result.text, } - logger.info(f"[Orchestration] Aggregated results ready") + logger.debug(f"[Orchestration] Aggregated results ready") return result -async def main(): - """Main entry point for the worker process.""" - logger.info("Starting Durable Task Multi-Agent Worker with Orchestration...") +def get_worker( 
+ taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. - # Get environment variables for taskhub and endpoint with defaults - taskhub_name = os.getenv("TASKHUB", "default") - endpoint = os.getenv("ENDPOINT", "http://localhost:8080") - - logger.info(f"Using taskhub: {taskhub_name}") - logger.info(f"Using endpoint: {endpoint}") - - # Set credential to None for emulator, or DefaultAzureCredential for Azure - credential = None if endpoint == "http://localhost:8080" else DefaultAzureCredential() + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") - # Create a worker using Azure Managed Durable Task - worker = DurableTaskSchedulerWorker( - host_address=endpoint, - secure_channel=endpoint != "http://localhost:8080", + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", taskhub=taskhub_name, - token_credential=credential + token_credential=credential, + log_handler=log_handler ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with agents and orchestrations registered. 
+ Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents and orchestrations registered + """ # Wrap it with the agent worker agent_worker = DurableAIAgentWorker(worker) # Create and register both agents - logger.info("Creating and registering agents...") + logger.debug("Creating and registering agents...") physicist_agent = create_physicist_agent() chemist_agent = create_chemist_agent() agent_worker.add_agent(physicist_agent) agent_worker.add_agent(chemist_agent) - logger.info(f"✓ Registered agent: {physicist_agent.name}") - logger.info(f" Entity name: dafx-{physicist_agent.name}") - logger.info(f"✓ Registered agent: {chemist_agent.name}") - logger.info(f" Entity name: dafx-{chemist_agent.name}") - logger.info("") + logger.debug(f"✓ Registered agents: {physicist_agent.name}, {chemist_agent.name}") # Register the orchestration function - logger.info("Registering orchestration function...") - worker.add_orchestrator(multi_agent_concurrent_orchestration) - logger.info(f"✓ Registered orchestration: {multi_agent_concurrent_orchestration.__name__}") - logger.info("") + logger.debug("Registering orchestration function...") + worker.add_orchestrator(multi_agent_concurrent_orchestration) # type: ignore + logger.debug(f"✓ Registered orchestration: {multi_agent_concurrent_orchestration.__name__}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Multi-Agent Worker with Orchestration...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents and orchestrations + setup_worker(worker) - logger.info("Worker is ready and listening for requests...") - logger.info("Press Ctrl+C to stop.") - logger.info("") + logger.debug("Worker is ready and listening for requests...") + logger.debug("Press Ctrl+C to stop.") try: # Start the worker (this blocks until stopped) @@ -166,9 +192,9 @@ async def main(): while True: await asyncio.sleep(1) except KeyboardInterrupt: - logger.info("Worker shutdown initiated") + logger.debug("Worker shutdown initiated") - logger.info("Worker stopped") + logger.debug("Worker stopped") if __name__ == "__main__": diff --git a/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/README.md b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/README.md new file mode 100644 index 0000000000..03b5df1b05 --- /dev/null +++ b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/README.md @@ -0,0 +1,84 @@ +# Multi-Agent Orchestration with Conditionals + +This sample demonstrates conditional orchestration logic with two agents that analyze incoming emails and route execution based on spam detection results. + +## Key Concepts Demonstrated + +- Multi-agent orchestration with two specialized agents (SpamDetectionAgent and EmailAssistantAgent). +- Conditional branching with different execution paths based on spam detection results. +- Structured outputs using Pydantic models with `response_format` for type-safe agent responses. +- Activity functions for side effects (spam handling and email sending). +- Decision-based routing where orchestration logic branches on agent output. + +## Environment Setup + +See the [README.md](../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. 
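The conditional routing listed under Key Concepts above reduces to one structured agent call followed by a branch over the typed result. A condensed sketch, using the model, agent, and activity names from `worker.py` later in this patch (the full version also validates the payload with an `EmailPayload` model and parses the assistant's reply through a second `EmailResponse` model, which this sketch skips):

```python
# Condensed sketch of the branch inside spam_detection_orchestration (see worker.py
# further down in this diff); the payload shape is simplified for illustration.
from collections.abc import Generator
from typing import Any

from agent_framework_durabletask import DurableAIAgentOrchestrationContext
from durabletask.task import OrchestrationContext, Task
from pydantic import BaseModel


class SpamDetectionResult(BaseModel):
    is_spam: bool
    reason: str


def spam_detection_orchestration(
    context: OrchestrationContext, payload: dict[str, Any]
) -> Generator[Task[Any], Any, str]:
    agents = DurableAIAgentOrchestrationContext(context)

    # Structured output: response_format parses the reply into the Pydantic model.
    spam_raw = yield agents.get_agent("SpamDetectionAgent").run(
        messages=f"Is this spam?\n{payload['email_content']}",
        response_format=SpamDetectionResult,
    )
    verdict: SpamDetectionResult = spam_raw.value

    if verdict.is_spam:
        # Spam path: hand the side effect to an activity so it is recorded in history.
        spam_result: str = yield context.call_activity("handle_spam_email", input=verdict.reason)
        return spam_result

    # Legitimate path: draft a reply, then send it via an activity.
    reply = yield agents.get_agent("EmailAssistantAgent").run(
        messages=f"Draft a professional reply to:\n{payload['email_content']}"
    )
    sent: str = yield context.call_activity("send_email", input=reply.text)
    return sent
```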
+ +## Running the Sample + +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: + +**Option 1: Combined (Recommended for Testing)** + +```bash +cd samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals +python sample.py +``` + +**Option 2: Separate Processes** + +Start the worker in one terminal: + +```bash +python worker.py +``` + +In a new terminal, run the client: + +```bash +python client.py +``` + +The sample runs two test cases: + +**Test 1: Legitimate Email** +``` +Email ID: email-001 +Email Content: Hello! I wanted to reach out about our upcoming project meeting... + +🔍 SpamDetectionAgent: Analyzing email... +✓ Not spam - routing to EmailAssistantAgent + +📧 EmailAssistantAgent: Drafting response... +✓ Email sent: [Professional response drafted by EmailAssistantAgent] +``` + +**Test 2: Spam Email** +``` +Email ID: email-002 +Email Content: URGENT! You've won $1,000,000! Click here now... + +🔍 SpamDetectionAgent: Analyzing email... +⚠️ Spam detected: [Reason from SpamDetectionAgent] +✓ Email marked as spam and handled +``` + +## How It Works + +1. **Input Validation**: Orchestration validates email payload using Pydantic models. +2. **Spam Detection**: SpamDetectionAgent analyzes email content. +3. **Conditional Routing**: + - If spam: Calls `handle_spam_email` activity + - If legitimate: Runs EmailAssistantAgent and calls `send_email` activity +4. **Result**: Returns confirmation message from the appropriate activity. + +## Viewing Agent State + +You can view the state of both agents and orchestration in the Durable Task Scheduler dashboard: + +1. Open your browser and navigate to `http://localhost:8082` +2. In the dashboard, you can view: + - Orchestration instance status and history + - SpamDetectionAgent and EmailAssistantAgent entity states + - Activity execution logs + - Decision branch paths taken diff --git a/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/client.py b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/client.py new file mode 100644 index 0000000000..cb2c357bd4 --- /dev/null +++ b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/client.py @@ -0,0 +1,145 @@ +"""Client application for starting a spam detection orchestration. + +This client connects to the Durable Task Scheduler and starts an orchestration +that uses conditional logic to either handle spam emails or draft professional responses. + +Prerequisites: +- The worker must be running with both agents, orchestration, and activities registered +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running +""" + +import asyncio +import logging +import os + +from azure.identity import DefaultAzureCredential +from durabletask.azuremanaged.client import DurableTaskSchedulerClient + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerClient: + """Create a configured DurableTaskSchedulerClient. 
+ + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def run_client( + client: DurableTaskSchedulerClient, + email_id: str = "email-001", + email_content: str = "Hello! I wanted to reach out about our upcoming project meeting." +) -> None: + """Run client to start and monitor the spam detection orchestration. + + Args: + client: The DurableTaskSchedulerClient instance + email_id: The email ID + email_content: The email content to analyze + """ + payload = { + "email_id": email_id, + "email_content": email_content, + } + + logger.debug("Starting spam detection orchestration...") + + # Start the orchestration with the email payload + instance_id = client.schedule_new_orchestration( # type: ignore + orchestrator="spam_detection_orchestration", + input=payload, + ) + + logger.debug(f"Orchestration started with instance ID: {instance_id}") + logger.debug("Waiting for orchestration to complete...") + + # Retrieve the final state + metadata = client.wait_for_orchestration_completion( + instance_id=instance_id, + timeout=300 + ) + + if metadata and metadata.runtime_status.name == "COMPLETED": + result = metadata.serialized_output + + logger.debug("Orchestration completed successfully!") + + # Parse and display the result + if result: + # Remove quotes if present + if result.startswith('"') and result.endswith('"'): + result = result[1:-1] + logger.info(f"Result: {result}") + + elif metadata: + logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") + if metadata.serialized_output: + logger.error(f"Output: {metadata.serialized_output}") + else: + logger.error("Orchestration did not complete within the timeout period") + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task Spam Detection Orchestration Client...") + + # Create client using helper function + client = get_client() + + try: + # Test with a legitimate email + logger.info("TEST 1: Legitimate Email") + + run_client( + client, + email_id="email-001", + email_content="Hello! I wanted to reach out about our upcoming project meeting scheduled for next week." + ) + + # Test with a spam email + logger.info("TEST 2: Spam Email") + + run_client( + client, + email_id="email-002", + email_content="URGENT! You've won $1,000,000! Click here now to claim your prize! Limited time offer! Don't miss out!" 
+ ) + + except Exception as e: + logger.exception(f"Error during orchestration: {e}") + finally: + logger.debug("") + logger.debug("Client shutting down") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/requirements.txt b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/requirements.txt new file mode 100644 index 0000000000..371b9e3b79 --- /dev/null +++ b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/requirements.txt @@ -0,0 +1,6 @@ +# Agent Framework packages (installing from local package until a package is published) +-e ../../../../ +-e ../../../../packages/durabletask + +# Azure authentication +azure-identity diff --git a/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/sample.py b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/sample.py new file mode 100644 index 0000000000..d8e9d0a4b3 --- /dev/null +++ b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/sample.py @@ -0,0 +1,79 @@ +"""Multi-Agent Orchestration with Conditionals Sample - Durable Task Integration + +This sample demonstrates conditional orchestration logic with two agents: +- SpamDetectionAgent: Analyzes emails for spam content +- EmailAssistantAgent: Drafts professional responses to legitimate emails + +The orchestration branches based on spam detection results, calling different +activity functions to handle spam or send legitimate email responses. + +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running (e.g., using Docker) + +To run this sample: + python sample.py +""" + +import logging + +from dotenv import load_dotenv + +# Import helper functions from worker and client modules +from client import get_client, run_client +from worker import get_worker, setup_worker + +logging.basicConfig( + level=logging.INFO, + force=True +) +logger = logging.getLogger() + + +def main(): + """Main entry point - runs both worker and client in single process.""" + logger.debug("Starting Durable Task Spam Detection Orchestration Sample (Combined Worker + Client)...") + + silent_handler = logging.NullHandler() + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agents, orchestrations, and activities using helper function + setup_worker(dts_worker) + + # Start the worker + dts_worker.start() + logger.debug("Worker started and listening for requests...") + + # Create the client using helper function + client = get_client(log_handler=silent_handler) + logger.debug("CLIENT: Starting orchestration tests...") + + try: + # Test 1: Legitimate email + # logger.info("TEST 1: Legitimate Email") + + run_client( + client, + email_id="email-001", + email_content="Hello! I wanted to reach out about our upcoming project meeting scheduled for next week." + ) + + # Test 2: Spam email + logger.info("TEST 2: Spam Email") + + run_client( + client, + email_id="email-002", + email_content="URGENT! You've won $1,000,000! Click here now to claim your prize! Limited time offer! Don't miss out!" + ) + + except Exception as e: + logger.exception(f"Error during sample execution: {e}") + + logger.debug("Sample completed. 
Worker shutting down...") + + +if __name__ == "__main__": + load_dotenv() + main() diff --git a/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/worker.py b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/worker.py new file mode 100644 index 0000000000..c512d76fad --- /dev/null +++ b/python/samples/getting_started/durabletask/06_multi_agent_orchestration_conditionals/worker.py @@ -0,0 +1,292 @@ +"""Worker process for hosting spam detection and email assistant agents with conditional orchestration. + +This worker registers two domain-specific agents (spam detector and email assistant) and an +orchestration function that routes execution based on spam detection results. Activity functions +handle side effects (spam handling and email sending). + +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Start a Durable Task Scheduler (e.g., using Docker) +""" + +import asyncio +from collections.abc import Generator +import logging +import os +from typing import Any, cast + +from agent_framework import AgentRunResponse +from agent_framework.azure import AzureOpenAIChatClient +from agent_framework_durabletask import DurableAIAgentOrchestrationContext, DurableAIAgentWorker +from azure.identity import AzureCliCredential, DefaultAzureCredential +from durabletask.task import ActivityContext, OrchestrationContext, Task +from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker +from pydantic import BaseModel, ValidationError + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Agent names +SPAM_AGENT_NAME = "SpamDetectionAgent" +EMAIL_AGENT_NAME = "EmailAssistantAgent" + + +class SpamDetectionResult(BaseModel): + """Result from spam detection agent.""" + is_spam: bool + reason: str + + +class EmailResponse(BaseModel): + """Result from email assistant agent.""" + response: str + + +class EmailPayload(BaseModel): + """Input payload for the orchestration.""" + email_id: str + email_content: str + + +def create_spam_agent(): + """Create the Spam Detection agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured Spam Detection agent + """ + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name=SPAM_AGENT_NAME, + instructions="You are a spam detection assistant that identifies spam emails.", + ) + + +def create_email_agent(): + """Create the Email Assistant agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured Email Assistant agent + """ + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name=EMAIL_AGENT_NAME, + instructions="You are an email assistant that helps users draft responses to emails with professionalism.", + ) + + +def handle_spam_email(context: ActivityContext, reason: str) -> str: + """Activity function to handle spam emails. + + Args: + context: The activity context + reason: The reason why the email was marked as spam + + Returns: + str: Confirmation message + """ + logger.debug(f"[Activity] Handling spam email: {reason}") + return f"Email marked as spam: {reason}" + + +def send_email(context: ActivityContext, message: str) -> str: + """Activity function to send emails. 
+ + Args: + context: The activity context + message: The email message to send + + Returns: + str: Confirmation message + """ + logger.debug(f"[Activity] Sending email: {message[:50]}...") + return f"Email sent: {message}" + + +def spam_detection_orchestration(context: OrchestrationContext, payload_raw: Any) -> Generator[Task[Any], Any, str]: + """Orchestration that detects spam and conditionally drafts email responses. + + This orchestration: + 1. Validates the input payload + 2. Runs the spam detection agent + 3. If spam: calls handle_spam_email activity + 4. If legitimate: runs email assistant agent and calls send_email activity + + Args: + context: The orchestration context + payload_raw: The input payload dictionary + + Returns: + str: Result message from activity functions + """ + logger.debug("[Orchestration] Starting spam detection orchestration") + + # Validate input + if not isinstance(payload_raw, dict): + raise ValueError("Email data is required") + + try: + payload = EmailPayload.model_validate(payload_raw) + except ValidationError as exc: + raise ValueError(f"Invalid email payload: {exc}") from exc + + logger.debug(f"[Orchestration] Processing email ID: {payload.email_id}") + + # Wrap the orchestration context to access agents + agent_context = DurableAIAgentOrchestrationContext(context) + + # Get spam detection agent + spam_agent = agent_context.get_agent(SPAM_AGENT_NAME) + + # Run spam detection + spam_prompt = ( + "Analyze this email for spam content and return a JSON response with 'is_spam' (boolean) " + "and 'reason' (string) fields:\n" + f"Email ID: {payload.email_id}\n" + f"Content: {payload.email_content}" + ) + + logger.info("[Orchestration] Running spam detection agent: %s", spam_prompt) + spam_result_task = spam_agent.run( + messages=spam_prompt, + response_format=SpamDetectionResult, + ) + + spam_result_raw: AgentRunResponse = yield spam_result_task + spam_result = cast(SpamDetectionResult, spam_result_raw.value) + + logger.info("[Orchestration] Spam detection result: is_spam=%s", spam_result.is_spam) + + # Branch based on spam detection result + if spam_result.is_spam: + logger.debug("[Orchestration] Email is spam, handling...") + result_task: Task[str] = context.call_activity("handle_spam_email", input=spam_result.reason) + result: str = yield result_task + return result + + # Email is legitimate - draft a response + logger.debug("[Orchestration] Email is legitimate, drafting response...") + + email_agent = agent_context.get_agent(EMAIL_AGENT_NAME) + + email_prompt = ( + "Draft a professional response to this email. Return a JSON response with a 'response' field " + "containing the reply:\n\n" + f"Email ID: {payload.email_id}\n" + f"Content: {payload.email_content}" + ) + + logger.info("[Orchestration] Running email assistant agent: %s", email_prompt) + email_result_task = email_agent.run( + messages=email_prompt, + response_format=EmailResponse, + ) + + email_result_raw: AgentRunResponse = yield email_result_task + email_result = cast(EmailResponse, email_result_raw.value) + + logger.debug("[Orchestration] Email response drafted, sending...") + result_task: Task[str] = context.call_activity("send_email", input=email_result.response) + result: str = yield result_task + + logger.info("Sent Email: %s", result) + + return result + + +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. 
+ + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with agents, orchestrations, and activities registered. + + Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents, orchestrations, and activities registered + """ + # Wrap it with the agent worker + agent_worker = DurableAIAgentWorker(worker) + + # Create and register both agents + logger.debug("Creating and registering agents...") + spam_agent = create_spam_agent() + email_agent = create_email_agent() + + agent_worker.add_agent(spam_agent) + agent_worker.add_agent(email_agent) + + logger.debug(f"✓ Registered agents: {spam_agent.name}, {email_agent.name}") + + # Register activity functions + logger.debug("Registering activity functions...") + worker.add_activity(handle_spam_email) # type: ignore[arg-type] + worker.add_activity(send_email) # type: ignore[arg-type] + logger.debug(f"✓ Registered activity: handle_spam_email") + logger.debug(f"✓ Registered activity: send_email") + + # Register the orchestration function + logger.debug("Registering orchestration function...") + worker.add_orchestrator(spam_detection_orchestration) # type: ignore[arg-type] + logger.debug(f"✓ Registered orchestration: {spam_detection_orchestration.__name__}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task Spam Detection Worker with Orchestration...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents, orchestrations, and activities + setup_worker(worker) + + logger.debug("Worker is ready and listening for requests...") + logger.debug("Press Ctrl+C to stop.") + + try: + # Start the worker (this blocks until stopped) + worker.start() + + # Keep the worker running + while True: + await asyncio.sleep(1) + except KeyboardInterrupt: + logger.debug("Worker shutdown initiated") + + logger.debug("Worker stopped") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/README.md b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/README.md new file mode 100644 index 0000000000..59f1186b33 --- /dev/null +++ b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/README.md @@ -0,0 +1,87 @@ +# Single-Agent Orchestration with Human-in-the-Loop (HITL) + +This sample demonstrates the human-in-the-loop pattern where a WriterAgent generates content and waits for human approval before publishing. The orchestration handles external events, timeouts, and iterative refinement based on feedback. 
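The approval wait described above amounts to racing a human event against a deadline. A minimal sketch of that race follows; the event name comes from `client.py` in this patch, while the timer call and the result accessor are not shown in the diff, so treat them as assumptions to check against the durabletask SDK. In the sample itself this logic sits inline in the orchestrator's review loop rather than in a helper:

```python
# Sketch of the approval-or-timeout race used by the HITL orchestration.
# HUMAN_APPROVAL_EVENT matches the constant in client.py; create_timer and
# get_result are assumed SDK calls -- verify their signatures before relying on them.
from datetime import timedelta

from durabletask.task import OrchestrationContext, when_any

HUMAN_APPROVAL_EVENT = "HumanApproval"


def wait_for_review(context: OrchestrationContext, timeout_minutes: int = 10):
    approval_task = context.wait_for_external_event(HUMAN_APPROVAL_EVENT)
    timeout_task = context.create_timer(timedelta(minutes=timeout_minutes))  # assumed API

    # Whichever task completes first wins the race.
    winner = yield when_any([approval_task, timeout_task])
    if winner is timeout_task:
        raise TimeoutError("No human response before the review deadline")

    # Assumed accessor for the event payload, e.g. {"approved": True, "feedback": ""}.
    return approval_task.get_result()
```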
+ +## Key Concepts Demonstrated + +- Human-in-the-loop workflow with orchestration pausing for external approval/rejection events. +- External event handling using `wait_for_external_event()` to receive human input. +- Timeout management with `when_any()` to race between approval event and timeout. +- Iterative refinement where agent regenerates content based on reviewer feedback. +- Structured outputs using Pydantic models with `response_format` for type-safe agent responses. +- Activity functions for notifications and publishing as separate side effects. +- Long-running orchestrations maintaining state across multiple interactions. + +## Environment Setup + +See the [README.md](../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +## Running the Sample + +With the environment setup, you can run the sample using the combined approach or separate worker and client processes: + +**Option 1: Combined (Recommended for Testing)** + +```bash +cd samples/getting_started/durabletask/07_single_agent_orchestration_hitl +python sample.py +``` + +**Option 2: Separate Processes** + +Start the worker in one terminal: + +```bash +python worker.py +``` + +In a new terminal, run the client: + +```bash +python client.py +``` + +The sample runs two test scenarios: + +**Test 1: Immediate Approval** +``` +Topic: The benefits of cloud computing +[WriterAgent generates content] +[Notification sent: Please review the content] +[Client sends approval] +✓ Content published successfully +``` + +**Test 2: Rejection with Feedback, Then Approval** +``` +Topic: The future of artificial intelligence +[WriterAgent generates initial content] +[Notification sent: Please review the content] +[Client sends rejection with feedback: "Make it more technical..."] +[WriterAgent regenerates content with feedback] +[Notification sent: Please review the revised content] +[Client sends approval] +✓ Revised content published successfully +``` + +## How It Works + +1. **Initial Generation**: WriterAgent creates content based on the topic. +2. **Review Loop** (up to max_review_attempts): + - Activity notifies user for approval + - Orchestration waits for approval event OR timeout + - **If approved**: Publishes content and returns + - **If rejected**: Incorporates feedback and regenerates + - **If timeout**: Raises TimeoutError +3. **Completion**: Returns published content or error. + +## Viewing Agent State + +You can view the state of the WriterAgent and orchestration in the Durable Task Scheduler dashboard: + +1. Open your browser and navigate to `http://localhost:8082` +2. In the dashboard, you can view: + - Orchestration instance status and pending events + - WriterAgent entity state and conversation threads + - Activity execution logs + - External event history diff --git a/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/client.py b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/client.py new file mode 100644 index 0000000000..0f394d5e7e --- /dev/null +++ b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/client.py @@ -0,0 +1,308 @@ +"""Client application for starting a human-in-the-loop content generation orchestration. + +This client connects to the Durable Task Scheduler and demonstrates the HITL pattern +by starting an orchestration, sending approval/rejection events, and monitoring progress. 
+ +Prerequisites: +- The worker must be running with the agent, orchestration, and activities registered +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running +""" + +import asyncio +import json +import logging +import os +import time + +from azure.identity import DefaultAzureCredential +from durabletask.azuremanaged.client import DurableTaskSchedulerClient +from durabletask.client import OrchestrationState + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Constants +HUMAN_APPROVAL_EVENT = "HumanApproval" + + +def get_client( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerClient: + """Create a configured DurableTaskSchedulerClient. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerClient instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerClient( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def _log_completion_result( + metadata: OrchestrationState | None, +) -> None: + """Log the orchestration completion result. + + Args: + metadata: The orchestration metadata + """ + if metadata and metadata.runtime_status.name == "COMPLETED": + result = metadata.serialized_output + + logger.debug(f"Orchestration completed successfully!") + + if result: + try: + result_dict = json.loads(result) + logger.info("Final Result: %s", json.dumps(result_dict, indent=2)) + except json.JSONDecodeError: + logger.debug(f"Result: {result}") + + elif metadata: + logger.error(f"Orchestration ended with status: {metadata.runtime_status.name}") + if metadata.serialized_output: + logger.error(f"Output: {metadata.serialized_output}") + else: + logger.error("Orchestration did not complete within the timeout period") + + +def _wait_and_log_completion( + client: DurableTaskSchedulerClient, + instance_id: str, + timeout: int = 60 +) -> None: + """Wait for orchestration completion and log the result. + + Args: + client: The DurableTaskSchedulerClient instance + instance_id: The orchestration instance ID + timeout: Maximum time to wait for completion in seconds + """ + logger.debug("Waiting for orchestration to complete...") + metadata = client.wait_for_orchestration_completion( + instance_id=instance_id, + timeout=timeout + ) + + _log_completion_result(metadata) + + +def send_approval( + client: DurableTaskSchedulerClient, + instance_id: str, + approved: bool, + feedback: str = "" +) -> None: + """Send approval or rejection event to the orchestration. 
+ + Args: + client: The DurableTaskSchedulerClient instance + instance_id: The orchestration instance ID + approved: Whether to approve or reject + feedback: Optional feedback message (used when rejected) + """ + approval_data = { + "approved": approved, + "feedback": feedback + } + + logger.debug(f"Sending {'APPROVAL' if approved else 'REJECTION'} to instance {instance_id}") + if feedback: + logger.debug(f"Feedback: {feedback}") + + # Raise the external event + client.raise_orchestration_event( + instance_id=instance_id, + event_name=HUMAN_APPROVAL_EVENT, + data=approval_data + ) + + logger.debug("Event sent successfully") + + +def wait_for_notification( + client: DurableTaskSchedulerClient, + instance_id: str, + timeout_seconds: int = 10 +) -> bool: + """Wait for the orchestration to reach a notification point. + + Polls the orchestration status until it appears to be waiting for approval. + + Args: + client: The DurableTaskSchedulerClient instance + instance_id: The orchestration instance ID + timeout_seconds: Maximum time to wait + + Returns: + True if notification detected, False if timeout + """ + logger.debug("Waiting for orchestration to reach notification point...") + + start_time = time.time() + while time.time() - start_time < timeout_seconds: + try: + metadata = client.get_orchestration_state( + instance_id=instance_id, + ) + + if metadata: + # Check if we're waiting for approval by examining custom status + if metadata.serialized_custom_status: + try: + custom_status = json.loads(metadata.serialized_custom_status) + # Handle both string and dict custom status + status_str = custom_status if isinstance(custom_status, str) else str(custom_status) + if status_str.lower().startswith("requesting human feedback"): + logger.debug("Orchestration is requesting human feedback") + return True + except (json.JSONDecodeError, AttributeError): + # If it's not JSON, treat as plain string + if metadata.serialized_custom_status.lower().startswith("requesting human feedback"): + logger.debug("Orchestration is requesting human feedback") + return True + + # Check for terminal states + if metadata.runtime_status.name == "COMPLETED": + logger.debug("Orchestration already completed") + return False + elif metadata.runtime_status.name == "FAILED": + logger.error("Orchestration failed") + return False + except Exception as e: + logger.debug(f"Status check: {e}") + + time.sleep(1) + + logger.warning("Timeout waiting for notification") + return False + + +def run_interactive_client(client: DurableTaskSchedulerClient) -> None: + """Run an interactive client that prompts for user input and handles approval workflow. 
+ + Args: + client: The DurableTaskSchedulerClient instance + """ + # Get user inputs + logger.debug("Content Generation - Human-in-the-Loop") + + topic = input("Enter the topic for content generation: ").strip() + if not topic: + topic = "The benefits of cloud computing" + logger.info(f"Using default topic: {topic}") + + max_attempts_str = input("Enter max review attempts (default: 3): ").strip() + max_review_attempts = int(max_attempts_str) if max_attempts_str else 3 + + timeout_hours_str = input("Enter approval timeout in hours (default: 5): ").strip() + timeout_hours = float(timeout_hours_str) if timeout_hours_str else 5.0 + approval_timeout_seconds = int(timeout_hours * 3600) + + payload = { + "topic": topic, + "max_review_attempts": max_review_attempts, + "approval_timeout_seconds": approval_timeout_seconds + } + + logger.debug(f"Configuration: Topic={topic}, Max attempts={max_review_attempts}, Timeout={timeout_hours}h") + + # Start the orchestration + logger.debug("Starting content generation orchestration...") + instance_id = client.schedule_new_orchestration( # type: ignore + orchestrator="content_generation_hitl_orchestration", + input=payload, + ) + + logger.info(f"Orchestration started with instance ID: {instance_id}") + + # Review loop + attempt = 1 + while attempt <= max_review_attempts: + logger.info(f"Review Attempt {attempt}/{max_review_attempts}") + + # Wait for orchestration to reach notification point + logger.debug("Waiting for content generation...") + if not wait_for_notification(client, instance_id, timeout_seconds=120): + logger.error("Failed to receive notification. Orchestration may have completed or failed.") + break + + logger.info("Content is ready for review! Please review the content in the worker logs.") + + # Get user decision + while True: + decision = input("Do you approve this content? (yes/no): ").strip().lower() + if decision in ['yes', 'y', 'no', 'n']: + break + logger.info("Please enter 'yes' or 'no'") + + approved = decision in ['yes', 'y'] + + if approved: + logger.debug("Sending approval...") + send_approval(client, instance_id, approved=True) + logger.info("Approval sent. Waiting for orchestration to complete...") + _wait_and_log_completion(client, instance_id, timeout=60) + break + else: + feedback = input("Enter feedback for improvement: ").strip() + if not feedback: + feedback = "Please revise the content." + + logger.debug("Sending rejection with feedback...") + send_approval(client, instance_id, approved=False, feedback=feedback) + logger.info("Rejection sent. 
Content will be regenerated...") + + attempt += 1 + + if attempt > max_review_attempts: + logger.info(f"Maximum review attempts ({max_review_attempts}) reached.") + _wait_and_log_completion(client, instance_id, timeout=30) + break + + # Small pause before next iteration + time.sleep(2) + + +async def main() -> None: + """Main entry point for the client application.""" + logger.debug("Starting Durable Task HITL Content Generation Client") + + # Create client using helper function + client = get_client() + + try: + run_interactive_client(client) + + except KeyboardInterrupt: + logger.info("Interrupted by user") + except Exception as e: + logger.exception(f"Error during orchestration: {e}") + finally: + logger.debug("Client shutting down") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/requirements.txt b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/requirements.txt new file mode 100644 index 0000000000..371b9e3b79 --- /dev/null +++ b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/requirements.txt @@ -0,0 +1,6 @@ +# Agent Framework packages (installing from local package until a package is published) +-e ../../../../ +-e ../../../../packages/durabletask + +# Azure authentication +azure-identity diff --git a/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/sample.py b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/sample.py new file mode 100644 index 0000000000..5468a70dd3 --- /dev/null +++ b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/sample.py @@ -0,0 +1,64 @@ +"""Human-in-the-Loop Orchestration Sample - Durable Task Integration + +This sample demonstrates the HITL pattern with a WriterAgent that generates content +and waits for human approval. 
The orchestration handles: +- External event waiting (approval/rejection) +- Timeout handling +- Iterative refinement based on feedback +- Activity functions for notifications and publishing + +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Durable Task Scheduler must be running (e.g., using Docker) + +To run this sample: + python sample.py +""" + +import logging + +from dotenv import load_dotenv + +# Import helper functions from worker and client modules +from client import get_client, run_interactive_client +from worker import get_worker, setup_worker + +logging.basicConfig( + level=logging.INFO, + force=True +) +logger = logging.getLogger() + + +def main(): + """Main entry point - runs both worker and client in single process.""" + logger.debug("Starting Durable Task HITL Content Generation Sample (Combined Worker + Client)...") + + silent_handler = logging.NullHandler() + # Create and start the worker using helper function and context manager + with get_worker(log_handler=silent_handler) as dts_worker: + # Register agent, orchestration, and activities using helper function + setup_worker(dts_worker) + + # Start the worker + dts_worker.start() + logger.debug("Worker started and listening for requests...") + + # Create the client using helper function + client = get_client(log_handler=silent_handler) + + try: + logger.debug("CLIENT: Starting orchestration tests...") + + run_interactive_client(client) + + except Exception as e: + logger.exception(f"Error during sample execution: {e}") + + logger.debug("Sample completed. Worker shutting down...") + + +if __name__ == "__main__": + load_dotenv() + main() diff --git a/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/worker.py b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/worker.py new file mode 100644 index 0000000000..76e8e621be --- /dev/null +++ b/python/samples/getting_started/durabletask/07_single_agent_orchestration_hitl/worker.py @@ -0,0 +1,364 @@ +"""Worker process for hosting a writer agent with human-in-the-loop orchestration. + +This worker registers a WriterAgent and an orchestration function that implements +a human-in-the-loop review workflow. The orchestration pauses for external events +(human approval/rejection) with timeout handling, and iterates based on feedback. 
+ +Prerequisites: +- Set AZURE_OPENAI_ENDPOINT and AZURE_OPENAI_CHAT_DEPLOYMENT_NAME + (plus AZURE_OPENAI_API_KEY or Azure CLI authentication) +- Start a Durable Task Scheduler (e.g., using Docker) +""" + +import asyncio +from collections.abc import Generator +from datetime import timedelta +import logging +import os +from typing import Any, cast + +from agent_framework import AgentRunResponse +from agent_framework.azure import AzureOpenAIChatClient +from agent_framework_durabletask import DurableAIAgentOrchestrationContext, DurableAIAgentWorker +from azure.identity import AzureCliCredential, DefaultAzureCredential +from durabletask.task import ActivityContext, OrchestrationContext, Task, when_any # type: ignore +from durabletask.azuremanaged.worker import DurableTaskSchedulerWorker +from pydantic import BaseModel, ValidationError + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Constants +WRITER_AGENT_NAME = "WriterAgent" +HUMAN_APPROVAL_EVENT = "HumanApproval" + + +class ContentGenerationInput(BaseModel): + """Input for content generation orchestration.""" + topic: str + max_review_attempts: int = 3 + approval_timeout_seconds: float = 300 # 5 minutes for demo (72 hours in production) + + +class GeneratedContent(BaseModel): + """Structured output from writer agent.""" + title: str + content: str + + +class HumanApproval(BaseModel): + """Human approval decision.""" + approved: bool + feedback: str = "" + + +def create_writer_agent(): + """Create the Writer agent using Azure OpenAI. + + Returns: + AgentProtocol: The configured Writer agent + """ + instructions = ( + "You are a professional content writer who creates high-quality articles on various topics. " + "You write engaging, informative, and well-structured content that follows best practices for readability and accuracy. " + "Return your response as JSON with 'title' and 'content' fields." + "Limit response to 300 words or less." + ) + + return AzureOpenAIChatClient(credential=AzureCliCredential()).create_agent( + name=WRITER_AGENT_NAME, + instructions=instructions, + ) + + +def notify_user_for_approval(context: ActivityContext, content: dict[str, str]) -> None: + """Activity function to notify user for approval. + + Args: + context: The activity context + content: The generated content dictionary + """ + model = GeneratedContent.model_validate(content) + logger.info("NOTIFICATION: Please review the following content for approval:") + logger.info(f"Title: {model.title or '(untitled)'}") + logger.info(f"Content: {model.content}") + logger.info("Use the client to send approval or rejection.") + +def publish_content(context: ActivityContext, content: dict[str, str]) -> None: + """Activity function to publish approved content. + + Args: + context: The activity context + content: The generated content dictionary + """ + model = GeneratedContent.model_validate(content) + logger.info("PUBLISHING: Content has been published successfully:") + logger.info(f"Title: {model.title or '(untitled)'}") + logger.info(f"Content: {model.content}") + + +def content_generation_hitl_orchestration( + context: OrchestrationContext, + payload_raw: Any +) -> Generator[Task[Any], Any, dict[str, str]]: + """Human-in-the-loop orchestration for content generation with approval workflow. + + This orchestration: + 1. Generates initial content using WriterAgent + 2. Loops up to max_review_attempts times: + a. Notifies user for approval + b. Waits for approval event or timeout + c. 
If approved: publishes and returns + d. If rejected: incorporates feedback and regenerates + e. If timeout: raises TimeoutError + 3. Raises RuntimeError if max attempts exhausted + + Args: + context: The orchestration context + payload_raw: The input payload + + Returns: + dict: Result with published content + + Raises: + ValueError: If input is invalid or agent returns no content + TimeoutError: If human approval times out + RuntimeError: If max review attempts exhausted + """ + logger.debug("[Orchestration] Starting HITL content generation orchestration") + + # Validate input + if not isinstance(payload_raw, dict): + raise ValueError("Content generation input is required") + + try: + payload = ContentGenerationInput.model_validate(payload_raw) + except ValidationError as exc: + raise ValueError(f"Invalid content generation input: {exc}") from exc + + logger.debug(f"[Orchestration] Topic: {payload.topic}") + logger.debug(f"[Orchestration] Max attempts: {payload.max_review_attempts}") + logger.debug(f"[Orchestration] Approval timeout: {payload.approval_timeout_seconds}s") + + # Wrap the orchestration context to access agents + agent_context = DurableAIAgentOrchestrationContext(context) + + # Get the writer agent + writer = agent_context.get_agent(WRITER_AGENT_NAME) + writer_thread = writer.get_new_thread() + + logger.info(f"ThreadID: {writer_thread.session_id}") + + # Generate initial content + logger.info("[Orchestration] Generating initial content...") + + initial_response: AgentRunResponse = yield writer.run( + messages=f"Write a short article about '{payload.topic}'.", + thread=writer_thread, + response_format=GeneratedContent, + ) + content = cast(GeneratedContent, initial_response.value) + + if not isinstance(content, GeneratedContent): + raise ValueError("Agent returned no content after extraction.") + + logger.debug(f"[Orchestration] Initial content generated: {content.title}") + + # Review loop + attempt = 0 + while attempt < payload.max_review_attempts: + attempt += 1 + logger.debug(f"[Orchestration] Review iteration #{attempt}/{payload.max_review_attempts}") + + context.set_custom_status(f"Requesting human feedback (Attempt {attempt}, timeout {payload.approval_timeout_seconds}s)") + + # Notify user for approval + yield context.call_activity( + "notify_user_for_approval", + input=content.model_dump() + ) + + logger.debug("[Orchestration] Waiting for human approval or timeout...") + + # Wait for approval event or timeout + approval_task: Task[Any] = context.wait_for_external_event(HUMAN_APPROVAL_EVENT) # type: ignore + timeout_task: Task[Any] = context.create_timer( # type: ignore + context.current_utc_datetime + timedelta(seconds=payload.approval_timeout_seconds) + ) + + # Race between approval and timeout + winner_task = yield when_any([approval_task, timeout_task]) # type: ignore + + if winner_task == approval_task: + # Approval received before timeout + logger.debug("[Orchestration] Received human approval event") + + context.set_custom_status("Content reviewed by human reviewer.") + + # Parse approval + approval_data: Any = approval_task.get_result() # type: ignore + logger.debug(f"[Orchestration] Approval data: {approval_data}") + + # Handle different formats of approval_data + if isinstance(approval_data, dict): + approval = HumanApproval.model_validate(approval_data) + elif isinstance(approval_data, str): + # Try to parse as boolean-like string + lower_data = approval_data.lower().strip() + if lower_data in {"true", "yes", "approved", "y", "1"}: + approval = 
HumanApproval(approved=True, feedback="") + elif lower_data in {"false", "no", "rejected", "n", "0"}: + approval = HumanApproval(approved=False, feedback="") + else: + approval = HumanApproval(approved=False, feedback=approval_data) + else: + approval = HumanApproval(approved=False, feedback=str(approval_data)) # type: ignore + + if approval.approved: + # Content approved - publish and return + logger.debug("[Orchestration] Content approved! Publishing...") + context.set_custom_status("Content approved by human reviewer. Publishing...") + publish_task: Task[Any] = context.call_activity( + "publish_content", + input=content.model_dump() + ) + yield publish_task + + logger.debug("[Orchestration] Content published successfully") + return {"content": content.content, "title": content.title} + + # Content rejected - incorporate feedback and regenerate + logger.debug(f"[Orchestration] Content rejected. Feedback: {approval.feedback}") + context.set_custom_status(f"Content rejected by human reviewer. Regenerating...") + + rewrite_prompt = ( + "The content was rejected by a human reviewer. Please rewrite the article incorporating their feedback.\n\n" + f"Human Feedback: {approval.feedback or 'No specific feedback provided.'}" + ) + + logger.debug("[Orchestration] Regenerating content with feedback...") + + logger.warning(f"Regenerating with ThreadID: {writer_thread.session_id}") + + rewrite_response: AgentRunResponse = yield writer.run( + messages=rewrite_prompt, + thread=writer_thread, + response_format=GeneratedContent, + ) + rewritten_content = cast(GeneratedContent, rewrite_response.value) + + if not isinstance(rewritten_content, GeneratedContent): + raise ValueError("Agent returned no content after rewrite.") + + content = rewritten_content + logger.debug(f"[Orchestration] Content regenerated: {content.title}") + + else: + # Timeout occurred + logger.error(f"[Orchestration] Approval timeout after {payload.approval_timeout_seconds}s") + + raise TimeoutError( + f"Human approval timed out after {payload.approval_timeout_seconds} second(s)." + ) + + # Max attempts exhausted + raise RuntimeError( + f"Content could not be approved after {payload.max_review_attempts} iteration(s)." + ) + + +def get_worker( + taskhub: str | None = None, + endpoint: str | None = None, + log_handler: logging.Handler | None = None +) -> DurableTaskSchedulerWorker: + """Create a configured DurableTaskSchedulerWorker. + + Args: + taskhub: Task hub name (defaults to TASKHUB env var or "default") + endpoint: Scheduler endpoint (defaults to ENDPOINT env var or "http://localhost:8080") + log_handler: Optional logging handler + + Returns: + Configured DurableTaskSchedulerWorker instance + """ + taskhub_name = taskhub or os.getenv("TASKHUB", "default") + endpoint_url = endpoint or os.getenv("ENDPOINT", "http://localhost:8080") + + logger.debug(f"Using taskhub: {taskhub_name}") + logger.debug(f"Using endpoint: {endpoint_url}") + + credential = None if endpoint_url == "http://localhost:8080" else DefaultAzureCredential() + + return DurableTaskSchedulerWorker( + host_address=endpoint_url, + secure_channel=endpoint_url != "http://localhost:8080", + taskhub=taskhub_name, + token_credential=credential, + log_handler=log_handler + ) + + +def setup_worker(worker: DurableTaskSchedulerWorker) -> DurableAIAgentWorker: + """Set up the worker with agents, orchestrations, and activities registered. 
+ + Args: + worker: The DurableTaskSchedulerWorker instance + + Returns: + DurableAIAgentWorker with agents, orchestrations, and activities registered + """ + # Wrap it with the agent worker + agent_worker = DurableAIAgentWorker(worker) + + # Create and register the writer agent + logger.debug("Creating and registering Writer agent...") + writer_agent = create_writer_agent() + agent_worker.add_agent(writer_agent) + + logger.debug(f"✓ Registered agent: {writer_agent.name}") + + # Register activity functions + logger.debug("Registering activity functions...") + worker.add_activity(notify_user_for_approval) # type: ignore + worker.add_activity(publish_content) # type: ignore + logger.debug(f"✓ Registered activity: notify_user_for_approval") + logger.debug(f"✓ Registered activity: publish_content") + + # Register the orchestration function + logger.debug("Registering orchestration function...") + worker.add_orchestrator(content_generation_hitl_orchestration) # type: ignore + logger.debug(f"✓ Registered orchestration: {content_generation_hitl_orchestration.__name__}") + + return agent_worker + + +async def main(): + """Main entry point for the worker process.""" + logger.debug("Starting Durable Task HITL Content Generation Worker...") + + # Create a worker using the helper function + worker = get_worker() + + # Setup worker with agents, orchestrations, and activities + setup_worker(worker) + + logger.debug("Worker is ready and listening for requests...") + logger.debug("Press Ctrl+C to stop.") + + try: + # Start the worker (this blocks until stopped) + worker.start() + + # Keep the worker running + while True: + await asyncio.sleep(1) + except KeyboardInterrupt: + logger.debug("Worker shutdown initiated") + + logger.debug("Worker stopped") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/getting_started/durabletask/README.md b/python/samples/getting_started/durabletask/README.md index 3b63294756..c55bacb027 100644 --- a/python/samples/getting_started/durabletask/README.md +++ b/python/samples/getting_started/durabletask/README.md @@ -2,9 +2,17 @@ This directory contains samples for durable agent hosting using the Durable Task Scheduler. These samples demonstrate the worker-client architecture pattern, enabling distributed agent execution with persistent conversation state. -- **[01_single_agent](01_single_agent/)**: A sample that demonstrates how to host a single conversational agent using the Durable Task Scheduler and interact with it via a client. -- **[04_single_agent_orchestration_chaining](04_single_agent_orchestration_chaining/)**: A sample that demonstrates how to chain multiple invocations of the same agent using a durable orchestration. -- **[05_multi_agent_orchestration_concurrency](05_multi_agent_orchestration_concurrency/)**: A sample that demonstrates how to host multiple agents and run them concurrently using a durable orchestration. +## Sample Catalog + +### Basic Patterns +- **[01_single_agent](01_single_agent/)**: Host a single conversational agent and interact with it via a client. Demonstrates basic worker-client architecture and agent state management. +- **[02_multi_agent](02_multi_agent/)**: Host multiple domain-specific agents (physicist and chemist) and route requests to the appropriate agent based on the question topic. 
+ +### Orchestration Patterns +- **[04_single_agent_orchestration_chaining](04_single_agent_orchestration_chaining/)**: Chain multiple invocations of the same agent using durable orchestration, preserving conversation context across sequential runs. +- **[05_multi_agent_orchestration_concurrency](05_multi_agent_orchestration_concurrency/)**: Run multiple agents concurrently within an orchestration, aggregating their responses in parallel. +- **[06_multi_agent_orchestration_conditionals](06_multi_agent_orchestration_conditionals/)**: Implement conditional branching in orchestrations with spam detection and email assistant agents. Demonstrates structured outputs with Pydantic models and activity functions for side effects. +- **[07_single_agent_orchestration_hitl](07_single_agent_orchestration_hitl/)**: Human-in-the-loop pattern with external event handling, timeouts, and iterative refinement based on human feedback. Shows long-running workflows with external interactions. ## Running the Samples @@ -78,20 +86,35 @@ The DTS dashboard will be available at `http://localhost:8082`. Each sample reads configuration from environment variables. You'll need to set the following environment variables: +Bash (Linux/macOS/WSL): + ```bash export AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" export AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="your-deployment-name" ``` +PowerShell: + +```powershell +$env:AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" +$env:AZURE_OPENAI_CHAT_DEPLOYMENT_NAME="your-deployment-name" +``` + ### Installing Dependencies -Navigate to the sample directory and install dependencies: +Navigate to the sample directory and install dependencies. For example: ```bash cd samples/getting_started/durabletask/01_single_agent pip install -r requirements.txt ``` +If you're using `uv` for package management: + +```bash +uv pip install -r requirements.txt +``` + ### Running the Samples Each sample follows a worker-client architecture. Most samples provide separate `worker.py` and `client.py` files, though some include a combined `sample.py` for convenience. 
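+
+For example, for a sample that provides both files, start the worker and the client in separate terminals:
+
+```bash
+# Terminal 1: start the worker
+cd samples/getting_started/durabletask/01_single_agent
+python worker.py
+```
+
+```bash
+# Terminal 2: run the client
+cd samples/getting_started/durabletask/01_single_agent
+python client.py
+```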
diff --git a/python/uv.lock b/python/uv.lock index 01edba7f1d..227860a479 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -471,8 +471,8 @@ dev = [ [package.metadata] requires-dist = [ { name = "agent-framework-core", editable = "packages/core" }, - { name = "durabletask", specifier = ">=1.1.0" }, - { name = "durabletask-azuremanaged", specifier = ">=1.1.0" }, + { name = "durabletask", specifier = ">=1.3.0" }, + { name = "durabletask-azuremanaged", specifier = ">=1.3.0" }, ] [package.metadata.requires-dev] @@ -519,7 +519,7 @@ math = [ tau2 = [ { name = "loguru", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] @@ -638,7 +638,7 @@ source = { editable = "packages/redis" } dependencies = [ { name = "agent-framework-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "redisvl", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] @@ -1051,15 +1051,15 @@ wheels = [ [[package]] name = "azure-core" -version = "1.37.0" +version = "1.38.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/ef/83/41c9371c8298999c67b007e308a0a3c4d6a59c6908fa9c62101f031f886f/azure_core-1.37.0.tar.gz", hash = "sha256:7064f2c11e4b97f340e8e8c6d923b822978be3016e46b7bc4aa4b337cfb48aee", size = 357620, upload-time = "2025-12-11T20:05:13.518Z" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/34/a9914e676971a13d6cc671b1ed172f9804b50a3a80a143ff196e52f4c7ee/azure_core-1.37.0-py3-none-any.whl", hash = "sha256:b3abe2c59e7d6bb18b38c275a5029ff80f98990e7c90a5e646249a56630fcc19", size = 214006, upload-time = "2025-12-11T20:05:14.96Z" }, + { url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" }, ] [[package]] @@ -1515,7 +1515,7 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform == 'win32'", ] dependencies = [ - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/58/01/1253e6698a07380cd31a736d248a3f2a50a7c88779a1813da27503cadc2a/contourpy-1.3.3.tar.gz", hash = "sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880", size = 13466174, upload-time = "2025-07-26T12:03:12.549Z" } wheels = [ @@ -1871,7 +1871,7 @@ wheels = [ [[package]] name = "durabletask" -version = "1.2.0" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "asyncio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -1880,22 +1880,22 @@ dependencies = [ { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8d/fa/03427f72ad78a583ac47fcd24f6b929e3aa5f2a8ea4e80d047f6697f5c3d/durabletask-1.2.0.tar.gz", hash = "sha256:a5cde3afe9cdcb0a3a0d3dcd0a1b1bd3af6353ab3d62970865b50b0f12d39115", size = 57573, upload-time = "2026-01-07T18:04:36.474Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/27/3d021e6b36fc1aab6099fafc56dfc8059b4e8968615a26c1a0418601e50a/durabletask-1.3.0.tar.gz", hash = "sha256:11e38dda6df4737fadca0c71fc0a0f769955877c8a8bdb25ccbf90cf45afbf63", size = 57830, upload-time = "2026-01-12T21:54:30.465Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/87/aa251ef3baffbefa02d6ee151c5b8e391d64a26e08647f81ad1ec3513356/durabletask-1.2.0-py3-none-any.whl", hash = 
"sha256:db1165da26dbf3289831d56bac43ff096120d8d914e23a4d784c9695cda2c790", size = 63854, upload-time = "2026-01-07T18:04:35.183Z" }, + { url = "https://files.pythonhosted.org/packages/44/87/31ea460dbfaf50d9877f143e2ce9829cac2fb106747d9900cc353356ea77/durabletask-1.3.0-py3-none-any.whl", hash = "sha256:411f23e13391b8845edca010873dd7a87ee7cfc1fe05753ab28a7cd7c3c1bd77", size = 64112, upload-time = "2026-01-12T21:54:29.471Z" }, ] [[package]] name = "durabletask-azuremanaged" -version = "1.2.0" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "azure-identity", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "durabletask", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6b/0b/81ae01349d1d9bafc34aff1ced24fe69e35d97fde0f61fe785690a211613/durabletask_azuremanaged-1.2.0.tar.gz", hash = "sha256:d00e5d22f859952fecb3aa9a8fcad5072469578f9df97bd9dc11388f96cf4deb", size = 4328, upload-time = "2026-01-07T18:07:50.819Z" } +sdist = { url = "https://files.pythonhosted.org/packages/29/29/6bb0b5fe51aa92e117adcdc93efe97cf5476d86c1496e5c5ab35d99a8d07/durabletask_azuremanaged-1.3.0.tar.gz", hash = "sha256:55172588e075afa80d46dcc2e5ddbd84be0a20cc78c74f687040c3720677d34c", size = 4343, upload-time = "2026-01-12T21:58:23.95Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/d0/b8823ba835cf0d7076a51c5bcb4d32c909e9c1c16464b11fdc599690bd14/durabletask_azuremanaged-1.2.0-py3-none-any.whl", hash = "sha256:2bdb0541045fed0dc9f48ab20a1578e14d34ffe38ead6650928fc4d6fe450b55", size = 6368, upload-time = "2026-01-07T18:07:49.882Z" }, + { url = "https://files.pythonhosted.org/packages/08/11/4d34fec302c4813e626080f1532d189767eb31d6d80e8f3698c230512f14/durabletask_azuremanaged-1.3.0-py3-none-any.whl", hash = "sha256:9da914f569da1597c858d494a95eda37e4372726c0ee65f30080dcafab262d60", size = 6366, upload-time = "2026-01-12T21:58:23.28Z" }, ] [[package]] @@ -2693,11 +2693,11 @@ wheels = [ [[package]] name = "identify" -version = "2.6.15" +version = "2.6.16" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, + { url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" }, ] [[package]] @@ -3136,7 +3136,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.80.13" +version = "1.80.15" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3154,9 +3154,9 @@ dependencies = [ { name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tokenizers", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/b4/ef75838159aabee15ad93d65ee0e91d04ba0e310784b7b0d3f490cca270c/litellm-1.80.13.tar.gz", hash = "sha256:61ed22dfad633ada3b97dd8a50d8e8d804da0115105006d2f9d77ba3fb247a0b", size = 13277620, upload-time = "2026-01-09T04:37:08.529Z" } +sdist = { url = "https://files.pythonhosted.org/packages/12/41/9b28df3e4739df83ddb32dfb2bccb12ad271d986494c9fd60e4927a0a6c3/litellm-1.80.15.tar.gz", hash = "sha256:759d09f33c9c6028c58dcdf71781b17b833ee926525714e09a408602be27f54e", size = 13376508, upload-time = "2026-01-11T18:31:44.95Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a6/90/e8e0ad7f57d3a56c3411b3867e02768f9722b5975a263c8aaaaba6693d91/litellm-1.80.13-py3-none-any.whl", hash = "sha256:43dcdbca010961f17d7a5a6a995a38d1a46101350959b0e8269576cfe913cf0b", size = 11562501, upload-time = "2026-01-09T04:37:05.551Z" }, + { url = "https://files.pythonhosted.org/packages/df/3b/b1bd693721ccb3c9a37c8233d019a643ac57bef5a93f279e5a63839ee4db/litellm-1.80.15-py3-none-any.whl", hash = "sha256:f354e49456985a235b9ed99df1c19d686d30501f96e68882dcc5b29b1e7c59d9", size = 11670707, upload-time = "2026-01-11T18:31:41.67Z" }, ] [package.optional-dependencies] @@ -3198,11 +3198,11 @@ wheels = [ [[package]] name = "litellm-proxy-extras" -version = "0.4.20" +version = "0.4.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/44/ab/df85ce715ebf488cacba338607f67d46c4e4db0b717c9d2f904b8dc7de12/litellm_proxy_extras-0.4.20.tar.gz", hash = "sha256:4fcc95db25cc8b75abbc3f00bb79fd6b94edd1b838ad7bb12cf839b39c67923a", size = 21044, upload-time = "2026-01-07T19:11:32.562Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/1b/18fd5dd6b89bc7f10ea9af49fdb7239dcb77cf59c80030016ac2bc7284d2/litellm_proxy_extras-0.4.21.tar.gz", hash = "sha256:fa0e012984aa8e5114f88f4bad53d6abb589e5ca3eab445f74f8ddeceb62d848", size = 21364, upload-time = "2026-01-10T20:00:27.403Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/f5/eb350c49e7cf09db5b335aaeef410c2094e19c84bfe51733cab8470dc011/litellm_proxy_extras-0.4.20-py3-none-any.whl", hash = "sha256:7737cd693dd1aa0bd25ada6d300b37f42c8c18d1820535aceb0ed38ed21f68f5", size = 46565, upload-time = "2026-01-07T19:11:29.728Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/920d1a89196fe0ffb55d054312dbf5c2110cbffabbc77c71df0f0455c270/litellm_proxy_extras-0.4.21-py3-none-any.whl", hash = "sha256:83a1734e9773610945230606012e602bbcbfba1c60fde836d51102c1a296f166", size = 47136, upload-time = "2026-01-10T20:00:25.849Z" }, ] [[package]] @@ -3326,7 +3326,7 @@ dependencies = [ { name = "fonttools", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "kiwisolver", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or 
(python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pillow", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyparsing", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3493,7 +3493,7 @@ version = "0.5.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/4a/c27b42ed9b1c7d13d9ba8b6905dece787d6259152f2309338aed29b2447b/ml_dtypes-0.5.4.tar.gz", hash = "sha256:8ab06a50fb9bf9666dd0fe5dfb4676fa2b0ac0f31ecff72a6c3af8e22c063453", size = 692314, upload-time = "2025-11-17T22:32:31.031Z" } wheels = [ @@ -3848,7 +3848,7 @@ wheels = [ [[package]] name = "numpy" -version = "2.4.0" +version = "2.4.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'darwin'", @@ -3864,79 +3864,79 @@ resolution-markers = [ "python_full_version == '3.12.*' and sys_platform == 'win32'", "python_full_version == '3.11.*' and sys_platform == 'win32'", ] -sdist = { url = "https://files.pythonhosted.org/packages/a4/7a/6a3d14e205d292b738db449d0de649b373a59edb0d0b4493821d0a3e8718/numpy-2.4.0.tar.gz", hash = "sha256:6e504f7b16118198f138ef31ba24d985b124c2c469fe8467007cf30fd992f934", size = 20685720, upload-time = "2025-12-20T16:18:19.023Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/7e/7bae7cbcc2f8132271967aa03e03954fc1e48aa1f3bf32b29ca95fbef352/numpy-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:316b2f2584682318539f0bcaca5a496ce9ca78c88066579ebd11fd06f8e4741e", size = 16940166, upload-time = "2025-12-20T16:15:43.434Z" }, - { url = "https://files.pythonhosted.org/packages/0f/27/6c13f5b46776d6246ec884ac5817452672156a506d08a1f2abb39961930a/numpy-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2718c1de8504121714234b6f8241d0019450353276c88b9453c9c3d92e101db", size = 12641781, 
upload-time = "2025-12-20T16:15:45.701Z" }, - { url = "https://files.pythonhosted.org/packages/14/1c/83b4998d4860d15283241d9e5215f28b40ac31f497c04b12fa7f428ff370/numpy-2.4.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:21555da4ec4a0c942520ead42c3b0dc9477441e085c42b0fbdd6a084869a6f6b", size = 5470247, upload-time = "2025-12-20T16:15:47.943Z" }, - { url = "https://files.pythonhosted.org/packages/54/08/cbce72c835d937795571b0464b52069f869c9e78b0c076d416c5269d2718/numpy-2.4.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:413aa561266a4be2d06cd2b9665e89d9f54c543f418773076a76adcf2af08bc7", size = 6799807, upload-time = "2025-12-20T16:15:49.795Z" }, - { url = "https://files.pythonhosted.org/packages/ff/be/2e647961cd8c980591d75cdcd9e8f647d69fbe05e2a25613dc0a2ea5fb1a/numpy-2.4.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0feafc9e03128074689183031181fac0897ff169692d8492066e949041096548", size = 14701992, upload-time = "2025-12-20T16:15:51.615Z" }, - { url = "https://files.pythonhosted.org/packages/a2/fb/e1652fb8b6fd91ce6ed429143fe2e01ce714711e03e5b762615e7b36172c/numpy-2.4.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8fdfed3deaf1928fb7667d96e0567cdf58c2b370ea2ee7e586aa383ec2cb346", size = 16646871, upload-time = "2025-12-20T16:15:54.129Z" }, - { url = "https://files.pythonhosted.org/packages/62/23/d841207e63c4322842f7cd042ae981cffe715c73376dcad8235fb31debf1/numpy-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e06a922a469cae9a57100864caf4f8a97a1026513793969f8ba5b63137a35d25", size = 16487190, upload-time = "2025-12-20T16:15:56.147Z" }, - { url = "https://files.pythonhosted.org/packages/bc/a0/6a842c8421ebfdec0a230e65f61e0dabda6edbef443d999d79b87c273965/numpy-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:927ccf5cd17c48f801f4ed43a7e5673a2724bd2171460be3e3894e6e332ef83a", size = 18580762, upload-time = "2025-12-20T16:15:58.524Z" }, - { url = "https://files.pythonhosted.org/packages/0a/d1/c79e0046641186f2134dde05e6181825b911f8bdcef31b19ddd16e232847/numpy-2.4.0-cp311-cp311-win32.whl", hash = "sha256:882567b7ae57c1b1a0250208cc21a7976d8cbcc49d5a322e607e6f09c9e0bd53", size = 6233359, upload-time = "2025-12-20T16:16:00.938Z" }, - { url = "https://files.pythonhosted.org/packages/fc/f0/74965001d231f28184d6305b8cdc1b6fcd4bf23033f6cb039cfe76c9fca7/numpy-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b986403023c8f3bf8f487c2e6186afda156174d31c175f747d8934dfddf3479", size = 12601132, upload-time = "2025-12-20T16:16:02.484Z" }, - { url = "https://files.pythonhosted.org/packages/65/32/55408d0f46dfebce38017f5bd931affa7256ad6beac1a92a012e1fbc67a7/numpy-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:3f3096405acc48887458bbf9f6814d43785ac7ba2a57ea6442b581dedbc60ce6", size = 10573977, upload-time = "2025-12-20T16:16:04.77Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ff/f6400ffec95de41c74b8e73df32e3fff1830633193a7b1e409be7fb1bb8c/numpy-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a8b6bb8369abefb8bd1801b054ad50e02b3275c8614dc6e5b0373c305291037", size = 16653117, upload-time = "2025-12-20T16:16:06.709Z" }, - { url = "https://files.pythonhosted.org/packages/fd/28/6c23e97450035072e8d830a3c411bf1abd1f42c611ff9d29e3d8f55c6252/numpy-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e284ca13d5a8367e43734148622caf0b261b275673823593e3e3634a6490f83", size = 12369711, upload-time = "2025-12-20T16:16:08.758Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/af/acbef97b630ab1bb45e6a7d01d1452e4251aa88ce680ac36e56c272120ec/numpy-2.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:49ff32b09f5aa0cd30a20c2b39db3e669c845589f2b7fc910365210887e39344", size = 5198355, upload-time = "2025-12-20T16:16:10.902Z" }, - { url = "https://files.pythonhosted.org/packages/c1/c8/4e0d436b66b826f2e53330adaa6311f5cac9871a5b5c31ad773b27f25a74/numpy-2.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:36cbfb13c152b1c7c184ddac43765db8ad672567e7bafff2cc755a09917ed2e6", size = 6545298, upload-time = "2025-12-20T16:16:12.607Z" }, - { url = "https://files.pythonhosted.org/packages/ef/27/e1f5d144ab54eac34875e79037011d511ac57b21b220063310cb96c80fbc/numpy-2.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35ddc8f4914466e6fc954c76527aa91aa763682a4f6d73249ef20b418fe6effb", size = 14398387, upload-time = "2025-12-20T16:16:14.257Z" }, - { url = "https://files.pythonhosted.org/packages/67/64/4cb909dd5ab09a9a5d086eff9586e69e827b88a5585517386879474f4cf7/numpy-2.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc578891de1db95b2a35001b695451767b580bb45753717498213c5ff3c41d63", size = 16363091, upload-time = "2025-12-20T16:16:17.32Z" }, - { url = "https://files.pythonhosted.org/packages/9d/9c/8efe24577523ec6809261859737cf117b0eb6fdb655abdfdc81b2e468ce4/numpy-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98e81648e0b36e325ab67e46b5400a7a6d4a22b8a7c8e8bbfe20e7db7906bf95", size = 16176394, upload-time = "2025-12-20T16:16:19.524Z" }, - { url = "https://files.pythonhosted.org/packages/61/f0/1687441ece7b47a62e45a1f82015352c240765c707928edd8aef875d5951/numpy-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d57b5046c120561ba8fa8e4030fbb8b822f3063910fa901ffadf16e2b7128ad6", size = 18287378, upload-time = "2025-12-20T16:16:22.866Z" }, - { url = "https://files.pythonhosted.org/packages/d3/6f/f868765d44e6fc466467ed810ba9d8d6db1add7d4a748abfa2a4c99a3194/numpy-2.4.0-cp312-cp312-win32.whl", hash = "sha256:92190db305a6f48734d3982f2c60fa30d6b5ee9bff10f2887b930d7b40119f4c", size = 5955432, upload-time = "2025-12-20T16:16:25.06Z" }, - { url = "https://files.pythonhosted.org/packages/d4/b5/94c1e79fcbab38d1ca15e13777477b2914dd2d559b410f96949d6637b085/numpy-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:680060061adb2d74ce352628cb798cfdec399068aa7f07ba9fb818b2b3305f98", size = 12306201, upload-time = "2025-12-20T16:16:26.979Z" }, - { url = "https://files.pythonhosted.org/packages/70/09/c39dadf0b13bb0768cd29d6a3aaff1fb7c6905ac40e9aaeca26b1c086e06/numpy-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:39699233bc72dd482da1415dcb06076e32f60eddc796a796c5fb6c5efce94667", size = 10308234, upload-time = "2025-12-20T16:16:29.417Z" }, - { url = "https://files.pythonhosted.org/packages/a7/0d/853fd96372eda07c824d24adf02e8bc92bb3731b43a9b2a39161c3667cc4/numpy-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a152d86a3ae00ba5f47b3acf3b827509fd0b6cb7d3259665e63dafbad22a75ea", size = 16649088, upload-time = "2025-12-20T16:16:31.421Z" }, - { url = "https://files.pythonhosted.org/packages/e3/37/cc636f1f2a9f585434e20a3e6e63422f70bfe4f7f6698e941db52ea1ac9a/numpy-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:39b19251dec4de8ff8496cd0806cbe27bf0684f765abb1f4809554de93785f2d", size = 12364065, upload-time = "2025-12-20T16:16:33.491Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/69/0b78f37ca3690969beee54103ce5f6021709134e8020767e93ba691a72f1/numpy-2.4.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:009bd0ea12d3c784b6639a8457537016ce5172109e585338e11334f6a7bb88ee", size = 5192640, upload-time = "2025-12-20T16:16:35.636Z" }, - { url = "https://files.pythonhosted.org/packages/1d/2a/08569f8252abf590294dbb09a430543ec8f8cc710383abfb3e75cc73aeda/numpy-2.4.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5fe44e277225fd3dff6882d86d3d447205d43532c3627313d17e754fb3905a0e", size = 6541556, upload-time = "2025-12-20T16:16:37.276Z" }, - { url = "https://files.pythonhosted.org/packages/93/e9/a949885a4e177493d61519377952186b6cbfdf1d6002764c664ba28349b5/numpy-2.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f935c4493eda9069851058fa0d9e39dbf6286be690066509305e52912714dbb2", size = 14396562, upload-time = "2025-12-20T16:16:38.953Z" }, - { url = "https://files.pythonhosted.org/packages/99/98/9d4ad53b0e9ef901c2ef1d550d2136f5ac42d3fd2988390a6def32e23e48/numpy-2.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cfa5f29a695cb7438965e6c3e8d06e0416060cf0d709c1b1c1653a939bf5c2a", size = 16351719, upload-time = "2025-12-20T16:16:41.503Z" }, - { url = "https://files.pythonhosted.org/packages/28/de/5f3711a38341d6e8dd619f6353251a0cdd07f3d6d101a8fd46f4ef87f895/numpy-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba0cb30acd3ef11c94dc27fbfba68940652492bc107075e7ffe23057f9425681", size = 16176053, upload-time = "2025-12-20T16:16:44.552Z" }, - { url = "https://files.pythonhosted.org/packages/2a/5b/2a3753dc43916501b4183532e7ace862e13211042bceafa253afb5c71272/numpy-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60e8c196cd82cbbd4f130b5290007e13e6de3eca79f0d4d38014769d96a7c475", size = 18277859, upload-time = "2025-12-20T16:16:47.174Z" }, - { url = "https://files.pythonhosted.org/packages/2c/c5/a18bcdd07a941db3076ef489d036ab16d2bfc2eae0cf27e5a26e29189434/numpy-2.4.0-cp313-cp313-win32.whl", hash = "sha256:5f48cb3e88fbc294dc90e215d86fbaf1c852c63dbdb6c3a3e63f45c4b57f7344", size = 5953849, upload-time = "2025-12-20T16:16:49.554Z" }, - { url = "https://files.pythonhosted.org/packages/4f/f1/719010ff8061da6e8a26e1980cf090412d4f5f8060b31f0c45d77dd67a01/numpy-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:a899699294f28f7be8992853c0c60741f16ff199205e2e6cdca155762cbaa59d", size = 12302840, upload-time = "2025-12-20T16:16:51.227Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5a/b3d259083ed8b4d335270c76966cb6cf14a5d1b69e1a608994ac57a659e6/numpy-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9198f447e1dc5647d07c9a6bbe2063cc0132728cc7175b39dbc796da5b54920d", size = 10308509, upload-time = "2025-12-20T16:16:53.313Z" }, - { url = "https://files.pythonhosted.org/packages/31/01/95edcffd1bb6c0633df4e808130545c4f07383ab629ac7e316fb44fff677/numpy-2.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74623f2ab5cc3f7c886add4f735d1031a1d2be4a4ae63c0546cfd74e7a31ddf6", size = 12491815, upload-time = "2025-12-20T16:16:55.496Z" }, - { url = "https://files.pythonhosted.org/packages/59/ea/5644b8baa92cc1c7163b4b4458c8679852733fa74ca49c942cfa82ded4e0/numpy-2.4.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:0804a8e4ab070d1d35496e65ffd3cf8114c136a2b81f61dfab0de4b218aacfd5", size = 5320321, upload-time = "2025-12-20T16:16:57.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/26/4e/e10938106d70bc21319bd6a86ae726da37edc802ce35a3a71ecdf1fdfe7f/numpy-2.4.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:02a2038eb27f9443a8b266a66911e926566b5a6ffd1a689b588f7f35b81e7dc3", size = 6641635, upload-time = "2025-12-20T16:16:59.379Z" }, - { url = "https://files.pythonhosted.org/packages/b3/8d/a8828e3eaf5c0b4ab116924df82f24ce3416fa38d0674d8f708ddc6c8aac/numpy-2.4.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1889b3a3f47a7b5bee16bc25a2145bd7cb91897f815ce3499db64c7458b6d91d", size = 14456053, upload-time = "2025-12-20T16:17:01.768Z" }, - { url = "https://files.pythonhosted.org/packages/68/a1/17d97609d87d4520aa5ae2dcfb32305654550ac6a35effb946d303e594ce/numpy-2.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85eef4cb5625c47ee6425c58a3502555e10f45ee973da878ac8248ad58c136f3", size = 16401702, upload-time = "2025-12-20T16:17:04.235Z" }, - { url = "https://files.pythonhosted.org/packages/18/32/0f13c1b2d22bea1118356b8b963195446f3af124ed7a5adfa8fdecb1b6ca/numpy-2.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6dc8b7e2f4eb184b37655195f421836cfae6f58197b67e3ffc501f1333d993fa", size = 16242493, upload-time = "2025-12-20T16:17:06.856Z" }, - { url = "https://files.pythonhosted.org/packages/ae/23/48f21e3d309fbc137c068a1475358cbd3a901b3987dcfc97a029ab3068e2/numpy-2.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:44aba2f0cafd287871a495fb3163408b0bd25bbce135c6f621534a07f4f7875c", size = 18324222, upload-time = "2025-12-20T16:17:09.392Z" }, - { url = "https://files.pythonhosted.org/packages/ac/52/41f3d71296a3dcaa4f456aaa3c6fc8e745b43d0552b6bde56571bb4b4a0f/numpy-2.4.0-cp313-cp313t-win32.whl", hash = "sha256:20c115517513831860c573996e395707aa9fb691eb179200125c250e895fcd93", size = 6076216, upload-time = "2025-12-20T16:17:11.437Z" }, - { url = "https://files.pythonhosted.org/packages/35/ff/46fbfe60ab0710d2a2b16995f708750307d30eccbb4c38371ea9e986866e/numpy-2.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b48e35f4ab6f6a7597c46e301126ceba4c44cd3280e3750f85db48b082624fa4", size = 12444263, upload-time = "2025-12-20T16:17:13.182Z" }, - { url = "https://files.pythonhosted.org/packages/a3/e3/9189ab319c01d2ed556c932ccf55064c5d75bb5850d1df7a482ce0badead/numpy-2.4.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4d1cfce39e511069b11e67cd0bd78ceff31443b7c9e5c04db73c7a19f572967c", size = 10378265, upload-time = "2025-12-20T16:17:15.211Z" }, - { url = "https://files.pythonhosted.org/packages/ab/ed/52eac27de39d5e5a6c9aadabe672bc06f55e24a3d9010cd1183948055d76/numpy-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c95eb6db2884917d86cde0b4d4cf31adf485c8ec36bf8696dd66fa70de96f36b", size = 16647476, upload-time = "2025-12-20T16:17:17.671Z" }, - { url = "https://files.pythonhosted.org/packages/77/c0/990ce1b7fcd4e09aeaa574e2a0a839589e4b08b2ca68070f1acb1fea6736/numpy-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:65167da969cd1ec3a1df31cb221ca3a19a8aaa25370ecb17d428415e93c1935e", size = 12374563, upload-time = "2025-12-20T16:17:20.216Z" }, - { url = "https://files.pythonhosted.org/packages/37/7c/8c5e389c6ae8f5fd2277a988600d79e9625db3fff011a2d87ac80b881a4c/numpy-2.4.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3de19cfecd1465d0dcf8a5b5ea8b3155b42ed0b639dba4b71e323d74f2a3be5e", size = 5203107, upload-time = "2025-12-20T16:17:22.47Z" }, - { url = 
"https://files.pythonhosted.org/packages/e6/94/ca5b3bd6a8a70a5eec9a0b8dd7f980c1eff4b8a54970a9a7fef248ef564f/numpy-2.4.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6c05483c3136ac4c91b4e81903cb53a8707d316f488124d0398499a4f8e8ef51", size = 6538067, upload-time = "2025-12-20T16:17:24.001Z" }, - { url = "https://files.pythonhosted.org/packages/79/43/993eb7bb5be6761dde2b3a3a594d689cec83398e3f58f4758010f3b85727/numpy-2.4.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36667db4d6c1cea79c8930ab72fadfb4060feb4bfe724141cd4bd064d2e5f8ce", size = 14411926, upload-time = "2025-12-20T16:17:25.822Z" }, - { url = "https://files.pythonhosted.org/packages/03/75/d4c43b61de473912496317a854dac54f1efec3eeb158438da6884b70bb90/numpy-2.4.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a818668b674047fd88c4cddada7ab8f1c298812783e8328e956b78dc4807f9f", size = 16354295, upload-time = "2025-12-20T16:17:28.308Z" }, - { url = "https://files.pythonhosted.org/packages/b8/0a/b54615b47ee8736a6461a4bb6749128dd3435c5a759d5663f11f0e9af4ac/numpy-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1ee32359fb7543b7b7bd0b2f46294db27e29e7bbdf70541e81b190836cd83ded", size = 16190242, upload-time = "2025-12-20T16:17:30.993Z" }, - { url = "https://files.pythonhosted.org/packages/98/ce/ea207769aacad6246525ec6c6bbd66a2bf56c72443dc10e2f90feed29290/numpy-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e493962256a38f58283de033d8af176c5c91c084ea30f15834f7545451c42059", size = 18280875, upload-time = "2025-12-20T16:17:33.327Z" }, - { url = "https://files.pythonhosted.org/packages/17/ef/ec409437aa962ea372ed601c519a2b141701683ff028f894b7466f0ab42b/numpy-2.4.0-cp314-cp314-win32.whl", hash = "sha256:6bbaebf0d11567fa8926215ae731e1d58e6ec28a8a25235b8a47405d301332db", size = 6002530, upload-time = "2025-12-20T16:17:35.729Z" }, - { url = "https://files.pythonhosted.org/packages/5f/4a/5cb94c787a3ed1ac65e1271b968686521169a7b3ec0b6544bb3ca32960b0/numpy-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d857f55e7fdf7c38ab96c4558c95b97d1c685be6b05c249f5fdafcbd6f9899e", size = 12435890, upload-time = "2025-12-20T16:17:37.599Z" }, - { url = "https://files.pythonhosted.org/packages/48/a0/04b89db963af9de1104975e2544f30de89adbf75b9e75f7dd2599be12c79/numpy-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:bb50ce5fb202a26fd5404620e7ef820ad1ab3558b444cb0b55beb7ef66cd2d63", size = 10591892, upload-time = "2025-12-20T16:17:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/53/e5/d74b5ccf6712c06c7a545025a6a71bfa03bdc7e0568b405b0d655232fd92/numpy-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:355354388cba60f2132df297e2d53053d4063f79077b67b481d21276d61fc4df", size = 12494312, upload-time = "2025-12-20T16:17:41.714Z" }, - { url = "https://files.pythonhosted.org/packages/c2/08/3ca9cc2ddf54dfee7ae9a6479c071092a228c68aef08252aa08dac2af002/numpy-2.4.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:1d8f9fde5f6dc1b6fc34df8162f3b3079365468703fee7f31d4e0cc8c63baed9", size = 5322862, upload-time = "2025-12-20T16:17:44.145Z" }, - { url = "https://files.pythonhosted.org/packages/87/74/0bb63a68394c0c1e52670cfff2e309afa41edbe11b3327d9af29e4383f34/numpy-2.4.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e0434aa22c821f44eeb4c650b81c7fbdd8c0122c6c4b5a576a76d5a35625ecd9", size = 6644986, upload-time = "2025-12-20T16:17:46.203Z" }, - { url = 
"https://files.pythonhosted.org/packages/06/8f/9264d9bdbcf8236af2823623fe2f3981d740fc3461e2787e231d97c38c28/numpy-2.4.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40483b2f2d3ba7aad426443767ff5632ec3156ef09742b96913787d13c336471", size = 14457958, upload-time = "2025-12-20T16:17:48.017Z" }, - { url = "https://files.pythonhosted.org/packages/8c/d9/f9a69ae564bbc7236a35aa883319364ef5fd41f72aa320cc1cbe66148fe2/numpy-2.4.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6a7664ddd9746e20b7325351fe1a8408d0a2bf9c63b5e898290ddc8f09544", size = 16398394, upload-time = "2025-12-20T16:17:50.409Z" }, - { url = "https://files.pythonhosted.org/packages/34/c7/39241501408dde7f885d241a98caba5421061a2c6d2b2197ac5e3aa842d8/numpy-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ecb0019d44f4cdb50b676c5d0cb4b1eae8e15d1ed3d3e6639f986fc92b2ec52c", size = 16241044, upload-time = "2025-12-20T16:17:52.661Z" }, - { url = "https://files.pythonhosted.org/packages/7c/95/cae7effd90e065a95e59fe710eeee05d7328ed169776dfdd9f789e032125/numpy-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d0ffd9e2e4441c96a9c91ec1783285d80bf835b677853fc2770a89d50c1e48ac", size = 18321772, upload-time = "2025-12-20T16:17:54.947Z" }, - { url = "https://files.pythonhosted.org/packages/96/df/3c6c279accd2bfb968a76298e5b276310bd55d243df4fa8ac5816d79347d/numpy-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:77f0d13fa87036d7553bf81f0e1fe3ce68d14c9976c9851744e4d3e91127e95f", size = 6148320, upload-time = "2025-12-20T16:17:57.249Z" }, - { url = "https://files.pythonhosted.org/packages/92/8d/f23033cce252e7a75cae853d17f582e86534c46404dea1c8ee094a9d6d84/numpy-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b1f5b45829ac1848893f0ddf5cb326110604d6df96cdc255b0bf9edd154104d4", size = 12623460, upload-time = "2025-12-20T16:17:58.963Z" }, - { url = "https://files.pythonhosted.org/packages/a4/4f/1f8475907d1a7c4ef9020edf7f39ea2422ec896849245f00688e4b268a71/numpy-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:23a3e9d1a6f360267e8fbb38ba5db355a6a7e9be71d7fce7ab3125e88bb646c8", size = 10661799, upload-time = "2025-12-20T16:18:01.078Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ef/088e7c7342f300aaf3ee5f2c821c4b9996a1bef2aaf6a49cc8ab4883758e/numpy-2.4.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b54c83f1c0c0f1d748dca0af516062b8829d53d1f0c402be24b4257a9c48ada6", size = 16819003, upload-time = "2025-12-20T16:18:03.41Z" }, - { url = "https://files.pythonhosted.org/packages/ff/ce/a53017b5443b4b84517182d463fc7bcc2adb4faa8b20813f8e5f5aeb5faa/numpy-2.4.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:aabb081ca0ec5d39591fc33018cd4b3f96e1a2dd6756282029986d00a785fba4", size = 12567105, upload-time = "2025-12-20T16:18:05.594Z" }, - { url = "https://files.pythonhosted.org/packages/77/58/5ff91b161f2ec650c88a626c3905d938c89aaadabd0431e6d9c1330c83e2/numpy-2.4.0-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:8eafe7c36c8430b7794edeab3087dec7bf31d634d92f2af9949434b9d1964cba", size = 5395590, upload-time = "2025-12-20T16:18:08.031Z" }, - { url = "https://files.pythonhosted.org/packages/1d/4e/f1a084106df8c2df8132fc437e56987308e0524836aa7733721c8429d4fe/numpy-2.4.0-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:2f585f52b2baf07ff3356158d9268ea095e221371f1074fadea2f42544d58b4d", size = 6709947, upload-time = "2025-12-20T16:18:09.836Z" }, - { url = 
"https://files.pythonhosted.org/packages/63/09/3d8aeb809c0332c3f642da812ac2e3d74fc9252b3021f8c30c82e99e3f3d/numpy-2.4.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:32ed06d0fe9cae27d8fb5f400c63ccee72370599c75e683a6358dd3a4fb50aaf", size = 14535119, upload-time = "2025-12-20T16:18:12.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/7f/68f0fc43a2cbdc6bb239160c754d87c922f60fbaa0fa3cd3d312b8a7f5ee/numpy-2.4.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:57c540ed8fb1f05cb997c6761cd56db72395b0d6985e90571ff660452ade4f98", size = 16475815, upload-time = "2025-12-20T16:18:14.433Z" }, - { url = "https://files.pythonhosted.org/packages/11/73/edeacba3167b1ca66d51b1a5a14697c2c40098b5ffa01811c67b1785a5ab/numpy-2.4.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a39fb973a726e63223287adc6dafe444ce75af952d711e400f3bf2b36ef55a7b", size = 12489376, upload-time = "2025-12-20T16:18:16.524Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/34/2b1bc18424f3ad9af577f6ce23600319968a70575bd7db31ce66731bbef9/numpy-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0cce2a669e3c8ba02ee563c7835f92c153cf02edff1ae05e1823f1dde21b16a5", size = 16944563, upload-time = "2026-01-10T06:42:14.615Z" }, + { url = "https://files.pythonhosted.org/packages/2c/57/26e5f97d075aef3794045a6ca9eada6a4ed70eb9a40e7a4a93f9ac80d704/numpy-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:899d2c18024984814ac7e83f8f49d8e8180e2fbe1b2e252f2e7f1d06bea92425", size = 12645658, upload-time = "2026-01-10T06:42:17.298Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ba/80fc0b1e3cb2fd5c6143f00f42eb67762aa043eaa05ca924ecc3222a7849/numpy-2.4.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:09aa8a87e45b55a1c2c205d42e2808849ece5c484b2aab11fecabec3841cafba", size = 5474132, upload-time = "2026-01-10T06:42:19.637Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0a5b9a397f0e865ec171187c78d9b57e5588afc439a04ba9cab1ebb2c945/numpy-2.4.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:edee228f76ee2dab4579fad6f51f6a305de09d444280109e0f75df247ff21501", size = 6804159, upload-time = "2026-01-10T06:42:21.44Z" }, + { url = "https://files.pythonhosted.org/packages/86/9c/841c15e691c7085caa6fd162f063eff494099c8327aeccd509d1ab1e36ab/numpy-2.4.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a92f227dbcdc9e4c3e193add1a189a9909947d4f8504c576f4a732fd0b54240a", size = 14708058, upload-time = "2026-01-10T06:42:23.546Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9d/7862db06743f489e6a502a3b93136d73aea27d97b2cf91504f70a27501d6/numpy-2.4.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:538bf4ec353709c765ff75ae616c34d3c3dca1a68312727e8f2676ea644f8509", size = 16651501, upload-time = "2026-01-10T06:42:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/a6/9c/6fc34ebcbd4015c6e5f0c0ce38264010ce8a546cb6beacb457b84a75dfc8/numpy-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac08c63cb7779b85e9d5318e6c3518b424bc1f364ac4cb2c6136f12e5ff2dccc", size = 16492627, upload-time = "2026-01-10T06:42:28.938Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/63/2494a8597502dacda439f61b3c0db4da59928150e62be0e99395c3ad23c5/numpy-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f9c360ecef085e5841c539a9a12b883dff005fbd7ce46722f5e9cef52634d82", size = 18585052, upload-time = "2026-01-10T06:42:31.312Z" }, + { url = "https://files.pythonhosted.org/packages/6a/93/098e1162ae7522fc9b618d6272b77404c4656c72432ecee3abc029aa3de0/numpy-2.4.1-cp311-cp311-win32.whl", hash = "sha256:0f118ce6b972080ba0758c6087c3617b5ba243d806268623dc34216d69099ba0", size = 6236575, upload-time = "2026-01-10T06:42:33.872Z" }, + { url = "https://files.pythonhosted.org/packages/8c/de/f5e79650d23d9e12f38a7bc6b03ea0835b9575494f8ec94c11c6e773b1b1/numpy-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:18e14c4d09d55eef39a6ab5b08406e84bc6869c1e34eef45564804f90b7e0574", size = 12604479, upload-time = "2026-01-10T06:42:35.778Z" }, + { url = "https://files.pythonhosted.org/packages/dd/65/e1097a7047cff12ce3369bd003811516b20ba1078dbdec135e1cd7c16c56/numpy-2.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:6461de5113088b399d655d45c3897fa188766415d0f568f175ab071c8873bd73", size = 10578325, upload-time = "2026-01-10T06:42:38.518Z" }, + { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" }, + { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, + { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, + { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, + { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" }, + { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, + { url = "https://files.pythonhosted.org/packages/04/68/732d4b7811c00775f3bd522a21e8dd5a23f77eb11acdeb663e4a4ebf0ef4/numpy-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d797454e37570cfd61143b73b8debd623c3c0952959adb817dd310a483d58a1b", size = 16652495, upload-time = "2026-01-10T06:43:06.283Z" }, + { url = "https://files.pythonhosted.org/packages/20/ca/857722353421a27f1465652b2c66813eeeccea9d76d5f7b74b99f298e60e/numpy-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c55962006156aeef1629b953fd359064aa47e4d82cfc8e67f0918f7da3344f", size = 12368657, upload-time = "2026-01-10T06:43:09.094Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/2377c917513449cc6240031a79d30eb9a163d32a91e79e0da47c43f2c0c8/numpy-2.4.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:71abbea030f2cfc3092a0ff9f8c8fdefdc5e0bf7d9d9c99663538bb0ecdac0b9", size = 5197256, upload-time = "2026-01-10T06:43:13.634Z" }, + { url = "https://files.pythonhosted.org/packages/17/39/569452228de3f5de9064ac75137082c6214be1f5c532016549a7923ab4b5/numpy-2.4.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b55aa56165b17aaf15520beb9cbd33c9039810e0d9643dd4379e44294c7303e", size = 6545212, upload-time = "2026-01-10T06:43:15.661Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/77333f4d1e4dac4395385482557aeecf4826e6ff517e32ca48e1dafbe42a/numpy-2.4.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0faba4a331195bfa96f93dd9dfaa10b2c7aa8cda3a02b7fd635e588fe821bf5", size = 14402871, upload-time = "2026-01-10T06:43:17.324Z" }, + { url = "https://files.pythonhosted.org/packages/ba/87/d341e519956273b39d8d47969dd1eaa1af740615394fe67d06f1efa68773/numpy-2.4.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e3087f53e2b4428766b54932644d148613c5a595150533ae7f00dab2f319a8", size = 16359305, upload-time = "2026-01-10T06:43:19.376Z" }, + { url = "https://files.pythonhosted.org/packages/32/91/789132c6666288eaa20ae8066bb99eba1939362e8f1a534949a215246e97/numpy-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:49e792ec351315e16da54b543db06ca8a86985ab682602d90c60ef4ff4db2a9c", size = 16181909, upload-time = "2026-01-10T06:43:21.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/b8/090b8bd27b82a844bb22ff8fdf7935cb1980b48d6e439ae116f53cdc2143/numpy-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79e9e06c4c2379db47f3f6fc7a8652e7498251789bf8ff5bd43bf478ef314ca2", size = 18284380, upload-time = "2026-01-10T06:43:23.957Z" }, + { url = "https://files.pythonhosted.org/packages/67/78/722b62bd31842ff029412271556a1a27a98f45359dea78b1548a3a9996aa/numpy-2.4.1-cp313-cp313-win32.whl", hash = "sha256:3d1a100e48cb266090a031397863ff8a30050ceefd798f686ff92c67a486753d", size = 5957089, upload-time = "2026-01-10T06:43:27.535Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/cf32198b0b6e18d4fbfa9a21a992a7fca535b9bb2b0cdd217d4a3445b5ca/numpy-2.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:92a0e65272fd60bfa0d9278e0484c2f52fe03b97aedc02b357f33fe752c52ffb", size = 12307230, upload-time = "2026-01-10T06:43:29.298Z" }, + { url = "https://files.pythonhosted.org/packages/44/6c/534d692bfb7d0afe30611320c5fb713659dcb5104d7cc182aff2aea092f5/numpy-2.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:20d4649c773f66cc2fc36f663e091f57c3b7655f936a4c681b4250855d1da8f5", size = 10313125, upload-time = "2026-01-10T06:43:31.782Z" }, + { url = "https://files.pythonhosted.org/packages/da/a1/354583ac5c4caa566de6ddfbc42744409b515039e085fab6e0ff942e0df5/numpy-2.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f93bc6892fe7b0663e5ffa83b61aab510aacffd58c16e012bb9352d489d90cb7", size = 12496156, upload-time = "2026-01-10T06:43:34.237Z" }, + { url = "https://files.pythonhosted.org/packages/51/b0/42807c6e8cce58c00127b1dc24d365305189991f2a7917aa694a109c8d7d/numpy-2.4.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:178de8f87948163d98a4c9ab5bee4ce6519ca918926ec8df195af582de28544d", size = 5324663, upload-time = "2026-01-10T06:43:36.211Z" }, + { url = "https://files.pythonhosted.org/packages/fe/55/7a621694010d92375ed82f312b2f28017694ed784775269115323e37f5e2/numpy-2.4.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:98b35775e03ab7f868908b524fc0a84d38932d8daf7b7e1c3c3a1b6c7a2c9f15", size = 6645224, upload-time = "2026-01-10T06:43:37.884Z" }, + { url = "https://files.pythonhosted.org/packages/50/96/9fa8635ed9d7c847d87e30c834f7109fac5e88549d79ef3324ab5c20919f/numpy-2.4.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:941c2a93313d030f219f3a71fd3d91a728b82979a5e8034eb2e60d394a2b83f9", size = 14462352, upload-time = "2026-01-10T06:43:39.479Z" }, + { url = "https://files.pythonhosted.org/packages/03/d1/8cf62d8bb2062da4fb82dd5d49e47c923f9c0738032f054e0a75342faba7/numpy-2.4.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:529050522e983e00a6c1c6b67411083630de8b57f65e853d7b03d9281b8694d2", size = 16407279, upload-time = "2026-01-10T06:43:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/86/1c/95c86e17c6b0b31ce6ef219da00f71113b220bcb14938c8d9a05cee0ff53/numpy-2.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2302dc0224c1cbc49bb94f7064f3f923a971bfae45c33870dcbff63a2a550505", size = 16248316, upload-time = "2026-01-10T06:43:44.121Z" }, + { url = "https://files.pythonhosted.org/packages/30/b4/e7f5ff8697274c9d0fa82398b6a372a27e5cef069b37df6355ccb1f1db1a/numpy-2.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9171a42fcad32dcf3fa86f0a4faa5e9f8facefdb276f54b8b390d90447cff4e2", size = 18329884, upload-time = "2026-01-10T06:43:46.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/37/a4/b073f3e9d77f9aec8debe8ca7f9f6a09e888ad1ba7488f0c3b36a94c03ac/numpy-2.4.1-cp313-cp313t-win32.whl", hash = "sha256:382ad67d99ef49024f11d1ce5dcb5ad8432446e4246a4b014418ba3a1175a1f4", size = 6081138, upload-time = "2026-01-10T06:43:48.854Z" }, + { url = "https://files.pythonhosted.org/packages/16/16/af42337b53844e67752a092481ab869c0523bc95c4e5c98e4dac4e9581ac/numpy-2.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:62fea415f83ad8fdb6c20840578e5fbaf5ddd65e0ec6c3c47eda0f69da172510", size = 12447478, upload-time = "2026-01-10T06:43:50.476Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f8/fa85b2eac68ec631d0b631abc448552cb17d39afd17ec53dcbcc3537681a/numpy-2.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a7870e8c5fc11aef57d6fea4b4085e537a3a60ad2cdd14322ed531fdca68d261", size = 10382981, upload-time = "2026-01-10T06:43:52.575Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a7/ef08d25698e0e4b4efbad8d55251d20fe2a15f6d9aa7c9b30cd03c165e6f/numpy-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:3869ea1ee1a1edc16c29bbe3a2f2a4e515cc3a44d43903ad41e0cacdbaf733dc", size = 16652046, upload-time = "2026-01-10T06:43:54.797Z" }, + { url = "https://files.pythonhosted.org/packages/8f/39/e378b3e3ca13477e5ac70293ec027c438d1927f18637e396fe90b1addd72/numpy-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e867df947d427cdd7a60e3e271729090b0f0df80f5f10ab7dd436f40811699c3", size = 12378858, upload-time = "2026-01-10T06:43:57.099Z" }, + { url = "https://files.pythonhosted.org/packages/c3/74/7ec6154f0006910ed1fdbb7591cf4432307033102b8a22041599935f8969/numpy-2.4.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:e3bd2cb07841166420d2fa7146c96ce00cb3410664cbc1a6be028e456c4ee220", size = 5207417, upload-time = "2026-01-10T06:43:59.037Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b7/053ac11820d84e42f8feea5cb81cc4fcd1091499b45b1ed8c7415b1bf831/numpy-2.4.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:f0a90aba7d521e6954670550e561a4cb925713bd944445dbe9e729b71f6cabee", size = 6542643, upload-time = "2026-01-10T06:44:01.852Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c4/2e7908915c0e32ca636b92e4e4a3bdec4cb1e7eb0f8aedf1ed3c68a0d8cd/numpy-2.4.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d558123217a83b2d1ba316b986e9248a1ed1971ad495963d555ccd75dcb1556", size = 14418963, upload-time = "2026-01-10T06:44:04.047Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c0/3ed5083d94e7ffd7c404e54619c088e11f2e1939a9544f5397f4adb1b8ba/numpy-2.4.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f44de05659b67d20499cbc96d49f2650769afcb398b79b324bb6e297bfe3844", size = 16363811, upload-time = "2026-01-10T06:44:06.207Z" }, + { url = "https://files.pythonhosted.org/packages/0e/68/42b66f1852bf525050a67315a4fb94586ab7e9eaa541b1bef530fab0c5dd/numpy-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:69e7419c9012c4aaf695109564e3387f1259f001b4326dfa55907b098af082d3", size = 16197643, upload-time = "2026-01-10T06:44:08.33Z" }, + { url = "https://files.pythonhosted.org/packages/d2/40/e8714fc933d85f82c6bfc7b998a0649ad9769a32f3494ba86598aaf18a48/numpy-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd257026eb1b34352e749d7cc1678b5eeec3e329ad8c9965a797e08ccba205", size = 18289601, upload-time = "2026-01-10T06:44:10.841Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/9a/0d44b468cad50315127e884802351723daca7cf1c98d102929468c81d439/numpy-2.4.1-cp314-cp314-win32.whl", hash = "sha256:727c6c3275ddefa0dc078524a85e064c057b4f4e71ca5ca29a19163c607be745", size = 6005722, upload-time = "2026-01-10T06:44:13.332Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bb/c6513edcce5a831810e2dddc0d3452ce84d208af92405a0c2e58fd8e7881/numpy-2.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:7d5d7999df434a038d75a748275cd6c0094b0ecdb0837342b332a82defc4dc4d", size = 12438590, upload-time = "2026-01-10T06:44:15.006Z" }, + { url = "https://files.pythonhosted.org/packages/e9/da/a598d5cb260780cf4d255102deba35c1d072dc028c4547832f45dd3323a8/numpy-2.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:ce9ce141a505053b3c7bce3216071f3bf5c182b8b28930f14cd24d43932cd2df", size = 10596180, upload-time = "2026-01-10T06:44:17.386Z" }, + { url = "https://files.pythonhosted.org/packages/de/bc/ea3f2c96fcb382311827231f911723aeff596364eb6e1b6d1d91128aa29b/numpy-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:4e53170557d37ae404bf8d542ca5b7c629d6efa1117dac6a83e394142ea0a43f", size = 12498774, upload-time = "2026-01-10T06:44:19.467Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ab/ef9d939fe4a812648c7a712610b2ca6140b0853c5efea361301006c02ae5/numpy-2.4.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:a73044b752f5d34d4232f25f18160a1cc418ea4507f5f11e299d8ac36875f8a0", size = 5327274, upload-time = "2026-01-10T06:44:23.189Z" }, + { url = "https://files.pythonhosted.org/packages/bd/31/d381368e2a95c3b08b8cf7faac6004849e960f4a042d920337f71cef0cae/numpy-2.4.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:fb1461c99de4d040666ca0444057b06541e5642f800b71c56e6ea92d6a853a0c", size = 6648306, upload-time = "2026-01-10T06:44:25.012Z" }, + { url = "https://files.pythonhosted.org/packages/c8/e5/0989b44ade47430be6323d05c23207636d67d7362a1796ccbccac6773dd2/numpy-2.4.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423797bdab2eeefbe608d7c1ec7b2b4fd3c58d51460f1ee26c7500a1d9c9ee93", size = 14464653, upload-time = "2026-01-10T06:44:26.706Z" }, + { url = "https://files.pythonhosted.org/packages/10/a7/cfbe475c35371cae1358e61f20c5f075badc18c4797ab4354140e1d283cf/numpy-2.4.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:52b5f61bdb323b566b528899cc7db2ba5d1015bda7ea811a8bcf3c89c331fa42", size = 16405144, upload-time = "2026-01-10T06:44:29.378Z" }, + { url = "https://files.pythonhosted.org/packages/f8/a3/0c63fe66b534888fa5177cc7cef061541064dbe2b4b60dcc60ffaf0d2157/numpy-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42d7dd5fa36d16d52a84f821eb96031836fd405ee6955dd732f2023724d0aa01", size = 16247425, upload-time = "2026-01-10T06:44:31.721Z" }, + { url = "https://files.pythonhosted.org/packages/6b/2b/55d980cfa2c93bd40ff4c290bf824d792bd41d2fe3487b07707559071760/numpy-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7b6b5e28bbd47b7532698e5db2fe1db693d84b58c254e4389d99a27bb9b8f6b", size = 18330053, upload-time = "2026-01-10T06:44:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/23/12/8b5fc6b9c487a09a7957188e0943c9ff08432c65e34567cabc1623b03a51/numpy-2.4.1-cp314-cp314t-win32.whl", hash = "sha256:5de60946f14ebe15e713a6f22850c2372fa72f4ff9a432ab44aa90edcadaa65a", size = 6152482, upload-time = "2026-01-10T06:44:36.798Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/a5/9f8ca5856b8940492fc24fbe13c1bc34d65ddf4079097cf9e53164d094e1/numpy-2.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:8f085da926c0d491ffff3096f91078cc97ea67e7e6b65e490bc8dcda65663be2", size = 12627117, upload-time = "2026-01-10T06:44:38.828Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0d/eca3d962f9eef265f01a8e0d20085c6dd1f443cbffc11b6dede81fd82356/numpy-2.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:6436cffb4f2bf26c974344439439c95e152c9a527013f26b3577be6c2ca64295", size = 10667121, upload-time = "2026-01-10T06:44:41.644Z" }, + { url = "https://files.pythonhosted.org/packages/1e/48/d86f97919e79314a1cdee4c832178763e6e98e623e123d0bada19e92c15a/numpy-2.4.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ad35f20be147a204e28b6a0575fbf3540c5e5f802634d4258d55b1ff5facce1", size = 16822202, upload-time = "2026-01-10T06:44:43.738Z" }, + { url = "https://files.pythonhosted.org/packages/51/e9/1e62a7f77e0f37dcfb0ad6a9744e65df00242b6ea37dfafb55debcbf5b55/numpy-2.4.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8097529164c0f3e32bb89412a0905d9100bf434d9692d9fc275e18dcf53c9344", size = 12569985, upload-time = "2026-01-10T06:44:45.945Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7e/914d54f0c801342306fdcdce3e994a56476f1b818c46c47fc21ae968088c/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:ea66d2b41ca4a1630aae5507ee0a71647d3124d1741980138aa8f28f44dac36e", size = 5398484, upload-time = "2026-01-10T06:44:48.012Z" }, + { url = "https://files.pythonhosted.org/packages/1c/d8/9570b68584e293a33474e7b5a77ca404f1dcc655e40050a600dee81d27fb/numpy-2.4.1-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d3f8f0df9f4b8be57b3bf74a1d087fec68f927a2fab68231fdb442bf2c12e426", size = 6713216, upload-time = "2026-01-10T06:44:49.725Z" }, + { url = "https://files.pythonhosted.org/packages/33/9b/9dd6e2db8d49eb24f86acaaa5258e5f4c8ed38209a4ee9de2d1a0ca25045/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2023ef86243690c2791fd6353e5b4848eedaa88ca8a2d129f462049f6d484696", size = 14538937, upload-time = "2026-01-10T06:44:51.498Z" }, + { url = "https://files.pythonhosted.org/packages/53/87/d5bd995b0f798a37105b876350d346eea5838bd8f77ea3d7a48392f3812b/numpy-2.4.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8361ea4220d763e54cff2fbe7d8c93526b744f7cd9ddab47afeff7e14e8503be", size = 16479830, upload-time = "2026-01-10T06:44:53.931Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c7/b801bf98514b6ae6475e941ac05c58e6411dd863ea92916bfd6d510b08c1/numpy-2.4.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:4f1b68ff47680c2925f8063402a693ede215f0257f02596b1318ecdfb1d79e33", size = 12492579, upload-time = "2026-01-10T06:44:57.094Z" }, ] [[package]] @@ -3963,7 +3963,7 @@ wheels = [ [[package]] name = "openai" -version = "2.14.0" +version = "2.15.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -3975,9 +3975,9 @@ dependencies = [ { name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/f4/4690ecb5d70023ce6bfcfeabfe717020f654bde59a775058ec6ac4692463/openai-2.15.0.tar.gz", hash = "sha256:42eb8cbb407d84770633f31bf727d4ffb4138711c670565a41663d9439174fba", size = 627383, upload-time = "2026-01-09T22:10:08.603Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" }, + { url = "https://files.pythonhosted.org/packages/b5/df/c306f7375d42bafb379934c2df4c2fa3964656c8c782bac75ee10c102818/openai-2.15.0-py3-none-any.whl", hash = "sha256:6ae23b932cd7230f7244e52954daa6602716d6b9bf235401a107af731baea6c3", size = 1067879, upload-time = "2026-01-09T22:10:06.446Z" }, ] [[package]] @@ -4000,7 +4000,7 @@ wheels = [ [[package]] name = "openai-chatkit" -version = "1.5.1" +version = "1.5.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4009,9 +4009,9 @@ dependencies = [ { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "uvicorn", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f0/3c/d14634a2303752d58b872dadca2921a349de6b4b798840af78370ec5412d/openai_chatkit-1.5.1.tar.gz", hash = "sha256:54065f1ce98ce871f3f396b8c63a3579aad02663db17a3b01e4fd432a4029b14", size = 59200, upload-time = "2026-01-09T18:48:28.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/f3/3e7aafd6c29348e60d32082fb14e539661fe4100453a31b34d0fef1ff7b7/openai_chatkit-1.5.2.tar.gz", hash = "sha256:187d27b815f153fa060337c86ee3aab189f72269f23ac2bb2a35c6c88b83846d", size = 59268, upload-time = "2026-01-10T00:59:41.215Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/f5/3b2e6ec4c5a6099b7bf4ebb831043c8c8bfd42cbc725e6ddf98d9eb4da57/openai_chatkit-1.5.1-py3-none-any.whl", hash = "sha256:c026c8ba5bd18424d56b5027ac3e01fc2d9c93657b8984d03734dac21ca0f716", size = 41415, upload-time = "2026-01-09T18:48:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/dc/b6/475a4c723fb2e0de30feea505505eabe77666aa7d81855d356fb289e3d8a/openai_chatkit-1.5.2-py3-none-any.whl", hash = "sha256:3bf3f140f314924ef1d4148ce5174cff6aa4c5d1760f988ba2aa267fd434f960", size = 41482, upload-time = "2026-01-10T00:59:40.023Z" }, ] [[package]] @@ -4278,7 +4278,7 @@ version = "2.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or 
(python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "python-dateutil", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pytz", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "tzdata", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -4515,28 +4515,30 @@ wheels = [ [[package]] name = "polars" -version = "1.36.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "polars-runtime-32", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/dc/56f2a90c79a2cb13f9e956eab6385effe54216ae7a2068b3a6406bae4345/polars-1.36.1.tar.gz", hash = "sha256:12c7616a2305559144711ab73eaa18814f7aa898c522e7645014b68f1432d54c", size = 711993, upload-time = "2025-12-10T01:14:53.033Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/b5/ce40267c54b66f93572d84f7ba1c216b72a71cb2235e3724fab0911541fe/polars-1.37.0.tar.gz", hash = "sha256:6bbbeefb6f02f848d46ad4f4e922a92573986fd38611801c696bae98b02be4c8", size = 715429, upload-time = "2026-01-10T12:28:06.741Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f6/c6/36a1b874036b49893ecae0ac44a2f63d1a76e6212631a5b2f50a86e0e8af/polars-1.36.1-py3-none-any.whl", hash = "sha256:853c1bbb237add6a5f6d133c15094a9b727d66dd6a4eb91dbb07cdb056b2b8ef", size = 802429, upload-time = "2025-12-10T01:13:53.838Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/d890382bbfdeb25db039ef4a8c8f93b3faf0016e18130513274204954203/polars-1.37.0-py3-none-any.whl", hash = "sha256:fcc549b9923ef1bd6fd99b5fd0a00dfedf85406f4758ae018a69bcd18a91f113", size = 805614, upload-time = "2026-01-10T12:26:47.897Z" }, ] [[package]] name = "polars-runtime-32" -version = "1.36.1" +version = "1.37.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/31/df/597c0ef5eb8d761a16d72327846599b57c5d40d7f9e74306fc154aba8c37/polars_runtime_32-1.36.1.tar.gz", hash = "sha256:201c2cfd80ceb5d5cd7b63085b5fd08d6ae6554f922bcb941035e39638528a09", size = 2788751, upload-time = "2025-12-10T01:14:54.172Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/92/b818590a5ebcc55657f5483f26133174bd2b9ca88457b60c93669a9d0c75/polars_runtime_32-1.37.0.tar.gz", hash = "sha256:954ddb056e3a2db2cbcaae501225ac5604d1599b6debd9c6dbdf8efbac0e6511", size = 2820371, upload-time = "2026-01-10T12:28:08.195Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/ea/871129a2d296966c0925b078a9a93c6c5e7facb1c5eebfcd3d5811aeddc1/polars_runtime_32-1.36.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:327b621ca82594f277751f7e23d4b939ebd1be18d54b4cdf7a2f8406cecc18b2", size = 43494311, upload-time = "2025-12-10T01:13:56.096Z" }, - { url = "https://files.pythonhosted.org/packages/d8/76/0038210ad1e526ce5bb2933b13760d6b986b3045eccc1338e661bd656f77/polars_runtime_32-1.36.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:ab0d1f23084afee2b97de8c37aa3e02ec3569749ae39571bd89e7a8b11ae9e83", size = 39300602, upload-time = "2025-12-10T01:13:59.366Z" }, - { 
url = "https://files.pythonhosted.org/packages/54/1e/2707bee75a780a953a77a2c59829ee90ef55708f02fc4add761c579bf76e/polars_runtime_32-1.36.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:899b9ad2e47ceb31eb157f27a09dbc2047efbf4969a923a6b1ba7f0412c3e64c", size = 44511780, upload-time = "2025-12-10T01:14:02.285Z" }, - { url = "https://files.pythonhosted.org/packages/11/b2/3fede95feee441be64b4bcb32444679a8fbb7a453a10251583053f6efe52/polars_runtime_32-1.36.1-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:d9d077bb9df711bc635a86540df48242bb91975b353e53ef261c6fae6cb0948f", size = 40688448, upload-time = "2025-12-10T01:14:05.131Z" }, - { url = "https://files.pythonhosted.org/packages/05/0f/e629713a72999939b7b4bfdbf030a32794db588b04fdf3dc977dd8ea6c53/polars_runtime_32-1.36.1-cp39-abi3-win_amd64.whl", hash = "sha256:cc17101f28c9a169ff8b5b8d4977a3683cd403621841623825525f440b564cf0", size = 44464898, upload-time = "2025-12-10T01:14:08.296Z" }, - { url = "https://files.pythonhosted.org/packages/d1/d8/a12e6aa14f63784cead437083319ec7cece0d5bb9a5bfe7678cc6578b52a/polars_runtime_32-1.36.1-cp39-abi3-win_arm64.whl", hash = "sha256:809e73857be71250141225ddd5d2b30c97e6340aeaa0d445f930e01bef6888dc", size = 39798896, upload-time = "2025-12-10T01:14:11.568Z" }, + { url = "https://files.pythonhosted.org/packages/f0/67/76162c9fcc71b917bdfd2804eaf0ab7cdb264a89b89af4f195a918f9f97d/polars_runtime_32-1.37.0-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3591f4b8e734126d713a12869d3727360acbbcd1d440b45d830497a317a5a8b3", size = 43518436, upload-time = "2026-01-10T12:26:51.442Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ec/56f328e8fa4ebea453f5bc10c579774dff774a873ff224b3108d53c514f9/polars_runtime_32-1.37.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:47849420859159681e94589daad3a04ff66a2379c116ccd812d043f7ffe0094c", size = 39663939, upload-time = "2026-01-10T12:26:54.664Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b2/f1ea0edba327a92ce0158b7a0e4abe21f541e44c9fb8ec932cc47592ca5c/polars_runtime_32-1.37.0-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4648ea1e821b9a841b2a562f27bcf54ff1ad21f9c217adcf0f7d0b3c33dc6400", size = 41481348, upload-time = "2026-01-10T12:26:57.598Z" }, + { url = "https://files.pythonhosted.org/packages/3b/21/788a3dd724bb21cf42e2f4daa6510a47787e8b30dd535aa6cae20ea968d0/polars_runtime_32-1.37.0-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5272b6f1680a3e0d77c9f07cb5a54f307079eb5d519c71aa3c37b9af0ee03a9e", size = 45168069, upload-time = "2026-01-10T12:27:00.98Z" }, + { url = "https://files.pythonhosted.org/packages/8a/73/823d6534a20ebdcec4b7706ab2b3f2cfb8e07571305f4e7381cc22d83e31/polars_runtime_32-1.37.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:73301ef4fe80d8d748085259a4063ac52ff058088daa702e2a75e7d1ab7f14fc", size = 41675645, upload-time = "2026-01-10T12:27:04.334Z" }, + { url = "https://files.pythonhosted.org/packages/30/54/1bacad96dc2b67d33b886a45b249777212782561493718785cb27c7c362a/polars_runtime_32-1.37.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c60d523d738a7b3660d9abdfaff798f7602488f469d427865965b0bd2e40473a", size = 44737715, upload-time = "2026-01-10T12:27:08.152Z" }, + { url = "https://files.pythonhosted.org/packages/38/e3/aad525d8d89b903fcfa2bd0b4cb66b8a6e83e80b3d1348c5a428092d2983/polars_runtime_32-1.37.0-cp310-abi3-win_amd64.whl", hash = "sha256:f87f76f16e8030d277ecca0c0976aca62ec2b6ba2099ee9c6f75dfc97e7dc1b1", size = 45018403, 
upload-time = "2026-01-10T12:27:11.292Z" }, + { url = "https://files.pythonhosted.org/packages/0e/4d/ddcaa5f2e18763e02e66d0fd2efca049a42fe96fbeda188e89aeb38dd6fa/polars_runtime_32-1.37.0-cp310-abi3-win_arm64.whl", hash = "sha256:7ffbd9487e3668b0a57519f7ab5ab53ab656086db9f62dceaab41393a07be721", size = 41026243, upload-time = "2026-01-10T12:27:14.563Z" }, ] [[package]] @@ -5352,7 +5354,7 @@ dependencies = [ { name = "grpcio", version = "1.76.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.14' and sys_platform == 'darwin') or (python_full_version >= '3.14' and sys_platform == 'linux') or (python_full_version >= '3.14' and sys_platform == 'win32')" }, { name = "httpx", extra = ["http2"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "protobuf", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5383,7 +5385,7 @@ dependencies = [ { name = "jsonpath-ng", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ml-dtypes", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "pydantic", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "python-ulid", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5799,8 +5801,8 @@ 
resolution-markers = [ ] dependencies = [ { name = "joblib", marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, - { name = "scipy", version = "1.16.3", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "scipy", version = "1.17.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "threadpoolctl", marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, ] sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" } @@ -5906,7 +5908,7 @@ wheels = [ [[package]] name = "scipy" -version = "1.16.3" +version = "1.17.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'darwin'", @@ -5923,70 +5925,70 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform == 'win32'", ] dependencies = [ - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/5f/6f37d7439de1455ce9c5a556b8d1db0979f03a796c030bafdf08d35b7bf9/scipy-1.16.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:40be6cf99e68b6c4321e9f8782e7d5ff8265af28ef2cd56e9c9b2638fa08ad97", size = 36630881, upload-time = "2025-10-28T17:31:47.104Z" }, - { url = "https://files.pythonhosted.org/packages/7c/89/d70e9f628749b7e4db2aa4cd89735502ff3f08f7b9b27d2e799485987cd9/scipy-1.16.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:8be1ca9170fcb6223cc7c27f4305d680ded114a1567c0bd2bfcbf947d1b17511", size = 28941012, upload-time = "2025-10-28T17:31:53.411Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/a8/0e7a9a6872a923505dbdf6bb93451edcac120363131c19013044a1e7cb0c/scipy-1.16.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bea0a62734d20d67608660f69dcda23e7f90fb4ca20974ab80b6ed40df87a005", size = 20931935, upload-time = "2025-10-28T17:31:57.361Z" }, - { url = "https://files.pythonhosted.org/packages/bd/c7/020fb72bd79ad798e4dbe53938543ecb96b3a9ac3fe274b7189e23e27353/scipy-1.16.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:2a207a6ce9c24f1951241f4693ede2d393f59c07abc159b2cb2be980820e01fb", size = 23534466, upload-time = "2025-10-28T17:32:01.875Z" }, - { url = "https://files.pythonhosted.org/packages/be/a0/668c4609ce6dbf2f948e167836ccaf897f95fb63fa231c87da7558a374cd/scipy-1.16.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:532fb5ad6a87e9e9cd9c959b106b73145a03f04c7d57ea3e6f6bb60b86ab0876", size = 33593618, upload-time = "2025-10-28T17:32:06.902Z" }, - { url = "https://files.pythonhosted.org/packages/ca/6e/8942461cf2636cdae083e3eb72622a7fbbfa5cf559c7d13ab250a5dbdc01/scipy-1.16.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0151a0749efeaaab78711c78422d413c583b8cdd2011a3c1d6c794938ee9fdb2", size = 35899798, upload-time = "2025-10-28T17:32:12.665Z" }, - { url = "https://files.pythonhosted.org/packages/79/e8/d0f33590364cdbd67f28ce79368b373889faa4ee959588beddf6daef9abe/scipy-1.16.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7180967113560cca57418a7bc719e30366b47959dd845a93206fbed693c867e", size = 36226154, upload-time = "2025-10-28T17:32:17.961Z" }, - { url = "https://files.pythonhosted.org/packages/39/c1/1903de608c0c924a1749c590064e65810f8046e437aba6be365abc4f7557/scipy-1.16.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:deb3841c925eeddb6afc1e4e4a45e418d19ec7b87c5df177695224078e8ec733", size = 38878540, upload-time = "2025-10-28T17:32:23.907Z" }, - { url = "https://files.pythonhosted.org/packages/f1/d0/22ec7036ba0b0a35bccb7f25ab407382ed34af0b111475eb301c16f8a2e5/scipy-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:53c3844d527213631e886621df5695d35e4f6a75f620dca412bcd292f6b87d78", size = 38722107, upload-time = "2025-10-28T17:32:29.921Z" }, - { url = "https://files.pythonhosted.org/packages/7b/60/8a00e5a524bb3bf8898db1650d350f50e6cffb9d7a491c561dc9826c7515/scipy-1.16.3-cp311-cp311-win_arm64.whl", hash = "sha256:9452781bd879b14b6f055b26643703551320aa8d79ae064a71df55c00286a184", size = 25506272, upload-time = "2025-10-28T17:32:34.577Z" }, - { url = "https://files.pythonhosted.org/packages/40/41/5bf55c3f386b1643812f3a5674edf74b26184378ef0f3e7c7a09a7e2ca7f/scipy-1.16.3-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:81fc5827606858cf71446a5e98715ba0e11f0dbc83d71c7409d05486592a45d6", size = 36659043, upload-time = "2025-10-28T17:32:40.285Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0f/65582071948cfc45d43e9870bf7ca5f0e0684e165d7c9ef4e50d783073eb/scipy-1.16.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:c97176013d404c7346bf57874eaac5187d969293bf40497140b0a2b2b7482e07", size = 28898986, upload-time = "2025-10-28T17:32:45.325Z" }, - { url = "https://files.pythonhosted.org/packages/96/5e/36bf3f0ac298187d1ceadde9051177d6a4fe4d507e8f59067dc9dd39e650/scipy-1.16.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2b71d93c8a9936046866acebc915e2af2e292b883ed6e2cbe5c34beb094b82d9", size = 20889814, upload-time = "2025-10-28T17:32:49.277Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/35/178d9d0c35394d5d5211bbff7ac4f2986c5488b59506fef9e1de13ea28d3/scipy-1.16.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3d4a07a8e785d80289dfe66b7c27d8634a773020742ec7187b85ccc4b0e7b686", size = 23565795, upload-time = "2025-10-28T17:32:53.337Z" }, - { url = "https://files.pythonhosted.org/packages/fa/46/d1146ff536d034d02f83c8afc3c4bab2eddb634624d6529a8512f3afc9da/scipy-1.16.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0553371015692a898e1aa858fed67a3576c34edefa6b7ebdb4e9dde49ce5c203", size = 33349476, upload-time = "2025-10-28T17:32:58.353Z" }, - { url = "https://files.pythonhosted.org/packages/79/2e/415119c9ab3e62249e18c2b082c07aff907a273741b3f8160414b0e9193c/scipy-1.16.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:72d1717fd3b5e6ec747327ce9bda32d5463f472c9dce9f54499e81fbd50245a1", size = 35676692, upload-time = "2025-10-28T17:33:03.88Z" }, - { url = "https://files.pythonhosted.org/packages/27/82/df26e44da78bf8d2aeaf7566082260cfa15955a5a6e96e6a29935b64132f/scipy-1.16.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fb2472e72e24d1530debe6ae078db70fb1605350c88a3d14bc401d6306dbffe", size = 36019345, upload-time = "2025-10-28T17:33:09.773Z" }, - { url = "https://files.pythonhosted.org/packages/82/31/006cbb4b648ba379a95c87262c2855cd0d09453e500937f78b30f02fa1cd/scipy-1.16.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c5192722cffe15f9329a3948c4b1db789fbb1f05c97899187dcf009b283aea70", size = 38678975, upload-time = "2025-10-28T17:33:15.809Z" }, - { url = "https://files.pythonhosted.org/packages/c2/7f/acbd28c97e990b421af7d6d6cd416358c9c293fc958b8529e0bd5d2a2a19/scipy-1.16.3-cp312-cp312-win_amd64.whl", hash = "sha256:56edc65510d1331dae01ef9b658d428e33ed48b4f77b1d51caf479a0253f96dc", size = 38555926, upload-time = "2025-10-28T17:33:21.388Z" }, - { url = "https://files.pythonhosted.org/packages/ce/69/c5c7807fd007dad4f48e0a5f2153038dc96e8725d3345b9ee31b2b7bed46/scipy-1.16.3-cp312-cp312-win_arm64.whl", hash = "sha256:a8a26c78ef223d3e30920ef759e25625a0ecdd0d60e5a8818b7513c3e5384cf2", size = 25463014, upload-time = "2025-10-28T17:33:25.975Z" }, - { url = "https://files.pythonhosted.org/packages/72/f1/57e8327ab1508272029e27eeef34f2302ffc156b69e7e233e906c2a5c379/scipy-1.16.3-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d2ec56337675e61b312179a1ad124f5f570c00f920cc75e1000025451b88241c", size = 36617856, upload-time = "2025-10-28T17:33:31.375Z" }, - { url = "https://files.pythonhosted.org/packages/44/13/7e63cfba8a7452eb756306aa2fd9b37a29a323b672b964b4fdeded9a3f21/scipy-1.16.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:16b8bc35a4cc24db80a0ec836a9286d0e31b2503cb2fd7ff7fb0e0374a97081d", size = 28874306, upload-time = "2025-10-28T17:33:36.516Z" }, - { url = "https://files.pythonhosted.org/packages/15/65/3a9400efd0228a176e6ec3454b1fa998fbbb5a8defa1672c3f65706987db/scipy-1.16.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:5803c5fadd29de0cf27fa08ccbfe7a9e5d741bf63e4ab1085437266f12460ff9", size = 20865371, upload-time = "2025-10-28T17:33:42.094Z" }, - { url = "https://files.pythonhosted.org/packages/33/d7/eda09adf009a9fb81827194d4dd02d2e4bc752cef16737cc4ef065234031/scipy-1.16.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:b81c27fc41954319a943d43b20e07c40bdcd3ff7cf013f4fb86286faefe546c4", size = 23524877, upload-time = "2025-10-28T17:33:48.483Z" }, - { url = 
"https://files.pythonhosted.org/packages/7d/6b/3f911e1ebc364cb81320223a3422aab7d26c9c7973109a9cd0f27c64c6c0/scipy-1.16.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0c3b4dd3d9b08dbce0f3440032c52e9e2ab9f96ade2d3943313dfe51a7056959", size = 33342103, upload-time = "2025-10-28T17:33:56.495Z" }, - { url = "https://files.pythonhosted.org/packages/21/f6/4bfb5695d8941e5c570a04d9fcd0d36bce7511b7d78e6e75c8f9791f82d0/scipy-1.16.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7dc1360c06535ea6116a2220f760ae572db9f661aba2d88074fe30ec2aa1ff88", size = 35697297, upload-time = "2025-10-28T17:34:04.722Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6496dadbc80d8d896ff72511ecfe2316b50313bfc3ebf07a3f580f08bd8c/scipy-1.16.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:663b8d66a8748051c3ee9c96465fb417509315b99c71550fda2591d7dd634234", size = 36021756, upload-time = "2025-10-28T17:34:13.482Z" }, - { url = "https://files.pythonhosted.org/packages/fe/bd/a8c7799e0136b987bda3e1b23d155bcb31aec68a4a472554df5f0937eef7/scipy-1.16.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eab43fae33a0c39006a88096cd7b4f4ef545ea0447d250d5ac18202d40b6611d", size = 38696566, upload-time = "2025-10-28T17:34:22.384Z" }, - { url = "https://files.pythonhosted.org/packages/cd/01/1204382461fcbfeb05b6161b594f4007e78b6eba9b375382f79153172b4d/scipy-1.16.3-cp313-cp313-win_amd64.whl", hash = "sha256:062246acacbe9f8210de8e751b16fc37458213f124bef161a5a02c7a39284304", size = 38529877, upload-time = "2025-10-28T17:35:51.076Z" }, - { url = "https://files.pythonhosted.org/packages/7f/14/9d9fbcaa1260a94f4bb5b64ba9213ceb5d03cd88841fe9fd1ffd47a45b73/scipy-1.16.3-cp313-cp313-win_arm64.whl", hash = "sha256:50a3dbf286dbc7d84f176f9a1574c705f277cb6565069f88f60db9eafdbe3ee2", size = 25455366, upload-time = "2025-10-28T17:35:59.014Z" }, - { url = "https://files.pythonhosted.org/packages/e2/a3/9ec205bd49f42d45d77f1730dbad9ccf146244c1647605cf834b3a8c4f36/scipy-1.16.3-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:fb4b29f4cf8cc5a8d628bc8d8e26d12d7278cd1f219f22698a378c3d67db5e4b", size = 37027931, upload-time = "2025-10-28T17:34:31.451Z" }, - { url = "https://files.pythonhosted.org/packages/25/06/ca9fd1f3a4589cbd825b1447e5db3a8ebb969c1eaf22c8579bd286f51b6d/scipy-1.16.3-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:8d09d72dc92742988b0e7750bddb8060b0c7079606c0d24a8cc8e9c9c11f9079", size = 29400081, upload-time = "2025-10-28T17:34:39.087Z" }, - { url = "https://files.pythonhosted.org/packages/6a/56/933e68210d92657d93fb0e381683bc0e53a965048d7358ff5fbf9e6a1b17/scipy-1.16.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:03192a35e661470197556de24e7cb1330d84b35b94ead65c46ad6f16f6b28f2a", size = 21391244, upload-time = "2025-10-28T17:34:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/a8/7e/779845db03dc1418e215726329674b40576879b91814568757ff0014ad65/scipy-1.16.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:57d01cb6f85e34f0946b33caa66e892aae072b64b034183f3d87c4025802a119", size = 23929753, upload-time = "2025-10-28T17:34:51.793Z" }, - { url = "https://files.pythonhosted.org/packages/4c/4b/f756cf8161d5365dcdef9e5f460ab226c068211030a175d2fc7f3f41ca64/scipy-1.16.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:96491a6a54e995f00a28a3c3badfff58fd093bf26cd5fb34a2188c8c756a3a2c", size = 33496912, upload-time = "2025-10-28T17:34:59.8Z" }, - { url = 
"https://files.pythonhosted.org/packages/09/b5/222b1e49a58668f23839ca1542a6322bb095ab8d6590d4f71723869a6c2c/scipy-1.16.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cd13e354df9938598af2be05822c323e97132d5e6306b83a3b4ee6724c6e522e", size = 35802371, upload-time = "2025-10-28T17:35:08.173Z" }, - { url = "https://files.pythonhosted.org/packages/c1/8d/5964ef68bb31829bde27611f8c9deeac13764589fe74a75390242b64ca44/scipy-1.16.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63d3cdacb8a824a295191a723ee5e4ea7768ca5ca5f2838532d9f2e2b3ce2135", size = 36190477, upload-time = "2025-10-28T17:35:16.7Z" }, - { url = "https://files.pythonhosted.org/packages/ab/f2/b31d75cb9b5fa4dd39a0a931ee9b33e7f6f36f23be5ef560bf72e0f92f32/scipy-1.16.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e7efa2681ea410b10dde31a52b18b0154d66f2485328830e45fdf183af5aefc6", size = 38796678, upload-time = "2025-10-28T17:35:26.354Z" }, - { url = "https://files.pythonhosted.org/packages/b4/1e/b3723d8ff64ab548c38d87055483714fefe6ee20e0189b62352b5e015bb1/scipy-1.16.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2d1ae2cf0c350e7705168ff2429962a89ad90c2d49d1dd300686d8b2a5af22fc", size = 38640178, upload-time = "2025-10-28T17:35:35.304Z" }, - { url = "https://files.pythonhosted.org/packages/8e/f3/d854ff38789aca9b0cc23008d607ced9de4f7ab14fa1ca4329f86b3758ca/scipy-1.16.3-cp313-cp313t-win_arm64.whl", hash = "sha256:0c623a54f7b79dd88ef56da19bc2873afec9673a48f3b85b18e4d402bdd29a5a", size = 25803246, upload-time = "2025-10-28T17:35:42.155Z" }, - { url = "https://files.pythonhosted.org/packages/99/f6/99b10fd70f2d864c1e29a28bbcaa0c6340f9d8518396542d9ea3b4aaae15/scipy-1.16.3-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:875555ce62743e1d54f06cdf22c1e0bc47b91130ac40fe5d783b6dfa114beeb6", size = 36606469, upload-time = "2025-10-28T17:36:08.741Z" }, - { url = "https://files.pythonhosted.org/packages/4d/74/043b54f2319f48ea940dd025779fa28ee360e6b95acb7cd188fad4391c6b/scipy-1.16.3-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:bb61878c18a470021fb515a843dc7a76961a8daceaaaa8bad1332f1bf4b54657", size = 28872043, upload-time = "2025-10-28T17:36:16.599Z" }, - { url = "https://files.pythonhosted.org/packages/4d/e1/24b7e50cc1c4ee6ffbcb1f27fe9f4c8b40e7911675f6d2d20955f41c6348/scipy-1.16.3-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f2622206f5559784fa5c4b53a950c3c7c1cf3e84ca1b9c4b6c03f062f289ca26", size = 20862952, upload-time = "2025-10-28T17:36:22.966Z" }, - { url = "https://files.pythonhosted.org/packages/dd/3a/3e8c01a4d742b730df368e063787c6808597ccb38636ed821d10b39ca51b/scipy-1.16.3-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7f68154688c515cdb541a31ef8eb66d8cd1050605be9dcd74199cbd22ac739bc", size = 23508512, upload-time = "2025-10-28T17:36:29.731Z" }, - { url = "https://files.pythonhosted.org/packages/1f/60/c45a12b98ad591536bfe5330cb3cfe1850d7570259303563b1721564d458/scipy-1.16.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8b3c820ddb80029fe9f43d61b81d8b488d3ef8ca010d15122b152db77dc94c22", size = 33413639, upload-time = "2025-10-28T17:36:37.982Z" }, - { url = "https://files.pythonhosted.org/packages/71/bc/35957d88645476307e4839712642896689df442f3e53b0fa016ecf8a3357/scipy-1.16.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d3837938ae715fc0fe3c39c0202de3a8853aff22ca66781ddc2ade7554b7e2cc", size = 35704729, upload-time = "2025-10-28T17:36:46.547Z" }, - { url = 
"https://files.pythonhosted.org/packages/3b/15/89105e659041b1ca11c386e9995aefacd513a78493656e57789f9d9eab61/scipy-1.16.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:aadd23f98f9cb069b3bd64ddc900c4d277778242e961751f77a8cb5c4b946fb0", size = 36086251, upload-time = "2025-10-28T17:36:55.161Z" }, - { url = "https://files.pythonhosted.org/packages/1a/87/c0ea673ac9c6cc50b3da2196d860273bc7389aa69b64efa8493bdd25b093/scipy-1.16.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b7c5f1bda1354d6a19bc6af73a649f8285ca63ac6b52e64e658a5a11d4d69800", size = 38716681, upload-time = "2025-10-28T17:37:04.1Z" }, - { url = "https://files.pythonhosted.org/packages/91/06/837893227b043fb9b0d13e4bd7586982d8136cb249ffb3492930dab905b8/scipy-1.16.3-cp314-cp314-win_amd64.whl", hash = "sha256:e5d42a9472e7579e473879a1990327830493a7047506d58d73fc429b84c1d49d", size = 39358423, upload-time = "2025-10-28T17:38:20.005Z" }, - { url = "https://files.pythonhosted.org/packages/95/03/28bce0355e4d34a7c034727505a02d19548549e190bedd13a721e35380b7/scipy-1.16.3-cp314-cp314-win_arm64.whl", hash = "sha256:6020470b9d00245926f2d5bb93b119ca0340f0d564eb6fbaad843eaebf9d690f", size = 26135027, upload-time = "2025-10-28T17:38:24.966Z" }, - { url = "https://files.pythonhosted.org/packages/b2/6f/69f1e2b682efe9de8fe9f91040f0cd32f13cfccba690512ba4c582b0bc29/scipy-1.16.3-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:e1d27cbcb4602680a49d787d90664fa4974063ac9d4134813332a8c53dbe667c", size = 37028379, upload-time = "2025-10-28T17:37:14.061Z" }, - { url = "https://files.pythonhosted.org/packages/7c/2d/e826f31624a5ebbab1cd93d30fd74349914753076ed0593e1d56a98c4fb4/scipy-1.16.3-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:9b9c9c07b6d56a35777a1b4cc8966118fb16cfd8daf6743867d17d36cfad2d40", size = 29400052, upload-time = "2025-10-28T17:37:21.709Z" }, - { url = "https://files.pythonhosted.org/packages/69/27/d24feb80155f41fd1f156bf144e7e049b4e2b9dd06261a242905e3bc7a03/scipy-1.16.3-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:3a4c460301fb2cffb7f88528f30b3127742cff583603aa7dc964a52c463b385d", size = 21391183, upload-time = "2025-10-28T17:37:29.559Z" }, - { url = "https://files.pythonhosted.org/packages/f8/d3/1b229e433074c5738a24277eca520a2319aac7465eea7310ea6ae0e98ae2/scipy-1.16.3-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:f667a4542cc8917af1db06366d3f78a5c8e83badd56409f94d1eac8d8d9133fa", size = 23930174, upload-time = "2025-10-28T17:37:36.306Z" }, - { url = "https://files.pythonhosted.org/packages/16/9d/d9e148b0ec680c0f042581a2be79a28a7ab66c0c4946697f9e7553ead337/scipy-1.16.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f379b54b77a597aa7ee5e697df0d66903e41b9c85a6dd7946159e356319158e8", size = 33497852, upload-time = "2025-10-28T17:37:42.228Z" }, - { url = "https://files.pythonhosted.org/packages/2f/22/4e5f7561e4f98b7bea63cf3fd7934bff1e3182e9f1626b089a679914d5c8/scipy-1.16.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4aff59800a3b7f786b70bfd6ab551001cb553244988d7d6b8299cb1ea653b353", size = 35798595, upload-time = "2025-10-28T17:37:48.102Z" }, - { url = "https://files.pythonhosted.org/packages/83/42/6644d714c179429fc7196857866f219fef25238319b650bb32dde7bf7a48/scipy-1.16.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:da7763f55885045036fabcebd80144b757d3db06ab0861415d1c3b7c69042146", size = 36186269, upload-time = "2025-10-28T17:37:53.72Z" }, - { url = 
"https://files.pythonhosted.org/packages/ac/70/64b4d7ca92f9cf2e6fc6aaa2eecf80bb9b6b985043a9583f32f8177ea122/scipy-1.16.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ffa6eea95283b2b8079b821dc11f50a17d0571c92b43e2b5b12764dc5f9b285d", size = 38802779, upload-time = "2025-10-28T17:37:59.393Z" }, - { url = "https://files.pythonhosted.org/packages/61/82/8d0e39f62764cce5ffd5284131e109f07cf8955aef9ab8ed4e3aa5e30539/scipy-1.16.3-cp314-cp314t-win_amd64.whl", hash = "sha256:d9f48cafc7ce94cf9b15c6bffdc443a81a27bf7075cf2dcd5c8b40f85d10c4e7", size = 39471128, upload-time = "2025-10-28T17:38:05.259Z" }, - { url = "https://files.pythonhosted.org/packages/64/47/a494741db7280eae6dc033510c319e34d42dd41b7ac0c7ead39354d1a2b5/scipy-1.16.3-cp314-cp314t-win_arm64.whl", hash = "sha256:21d9d6b197227a12dcbf9633320a4e34c6b0e51c57268df255a0942983bac562", size = 26464127, upload-time = "2025-10-28T17:38:11.34Z" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/3e/9cca699f3486ce6bc12ff46dc2031f1ec8eb9ccc9a320fdaf925f1417426/scipy-1.17.0.tar.gz", hash = "sha256:2591060c8e648d8b96439e111ac41fd8342fdeff1876be2e19dea3fe8930454e", size = 30396830, upload-time = "2026-01-10T21:34:23.009Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/4b/c89c131aa87cad2b77a54eb0fb94d633a842420fa7e919dc2f922037c3d8/scipy-1.17.0-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:2abd71643797bd8a106dff97894ff7869eeeb0af0f7a5ce02e4227c6a2e9d6fd", size = 31381316, upload-time = "2026-01-10T21:24:33.42Z" }, + { url = "https://files.pythonhosted.org/packages/5e/5f/a6b38f79a07d74989224d5f11b55267714707582908a5f1ae854cf9a9b84/scipy-1.17.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:ef28d815f4d2686503e5f4f00edc387ae58dfd7a2f42e348bb53359538f01558", size = 27966760, upload-time = "2026-01-10T21:24:38.911Z" }, + { url = "https://files.pythonhosted.org/packages/c1/20/095ad24e031ee8ed3c5975954d816b8e7e2abd731e04f8be573de8740885/scipy-1.17.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:272a9f16d6bb4667e8b50d25d71eddcc2158a214df1b566319298de0939d2ab7", size = 20138701, upload-time = "2026-01-10T21:24:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/89/11/4aad2b3858d0337756f3323f8960755704e530b27eb2a94386c970c32cbe/scipy-1.17.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:7204fddcbec2fe6598f1c5fdf027e9f259106d05202a959a9f1aecf036adc9f6", size = 22480574, upload-time = "2026-01-10T21:24:47.266Z" }, + { url = "https://files.pythonhosted.org/packages/85/bd/f5af70c28c6da2227e510875cadf64879855193a687fb19951f0f44cfd6b/scipy-1.17.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc02c37a5639ee67d8fb646ffded6d793c06c5622d36b35cfa8fe5ececb8f042", size = 32862414, upload-time = "2026-01-10T21:24:52.566Z" }, + { url = "https://files.pythonhosted.org/packages/ef/df/df1457c4df3826e908879fe3d76bc5b6e60aae45f4ee42539512438cfd5d/scipy-1.17.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dac97a27520d66c12a34fd90a4fe65f43766c18c0d6e1c0a80f114d2260080e4", size = 35112380, upload-time = "2026-01-10T21:24:58.433Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/bb/88e2c16bd1dd4de19d80d7c5e238387182993c2fb13b4b8111e3927ad422/scipy-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb7446a39b3ae0fe8f416a9a3fdc6fba3f11c634f680f16a239c5187bc487c0", size = 34922676, upload-time = "2026-01-10T21:25:04.287Z" }, + { url = "https://files.pythonhosted.org/packages/02/ba/5120242cc735f71fc002cff0303d536af4405eb265f7c60742851e7ccfe9/scipy-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:474da16199f6af66601a01546144922ce402cb17362e07d82f5a6cf8f963e449", size = 37507599, upload-time = "2026-01-10T21:25:09.851Z" }, + { url = "https://files.pythonhosted.org/packages/52/c8/08629657ac6c0da198487ce8cd3de78e02cfde42b7f34117d56a3fe249dc/scipy-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:255c0da161bd7b32a6c898e7891509e8a9289f0b1c6c7d96142ee0d2b114c2ea", size = 36380284, upload-time = "2026-01-10T21:25:15.632Z" }, + { url = "https://files.pythonhosted.org/packages/6c/4a/465f96d42c6f33ad324a40049dfd63269891db9324aa66c4a1c108c6f994/scipy-1.17.0-cp311-cp311-win_arm64.whl", hash = "sha256:85b0ac3ad17fa3be50abd7e69d583d98792d7edc08367e01445a1e2076005379", size = 24370427, upload-time = "2026-01-10T21:25:20.514Z" }, + { url = "https://files.pythonhosted.org/packages/0b/11/7241a63e73ba5a516f1930ac8d5b44cbbfabd35ac73a2d08ca206df007c4/scipy-1.17.0-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:0d5018a57c24cb1dd828bcf51d7b10e65986d549f52ef5adb6b4d1ded3e32a57", size = 31364580, upload-time = "2026-01-10T21:25:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1d/5057f812d4f6adc91a20a2d6f2ebcdb517fdbc87ae3acc5633c9b97c8ba5/scipy-1.17.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:88c22af9e5d5a4f9e027e26772cc7b5922fab8bcc839edb3ae33de404feebd9e", size = 27969012, upload-time = "2026-01-10T21:25:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/e3/21/f6ec556c1e3b6ec4e088da667d9987bb77cc3ab3026511f427dc8451187d/scipy-1.17.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f3cd947f20fe17013d401b64e857c6b2da83cae567adbb75b9dcba865abc66d8", size = 20140691, upload-time = "2026-01-10T21:25:34.802Z" }, + { url = "https://files.pythonhosted.org/packages/7a/fe/5e5ad04784964ba964a96f16c8d4676aa1b51357199014dce58ab7ec5670/scipy-1.17.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e8c0b331c2c1f531eb51f1b4fc9ba709521a712cce58f1aa627bc007421a5306", size = 22463015, upload-time = "2026-01-10T21:25:39.277Z" }, + { url = "https://files.pythonhosted.org/packages/4a/69/7c347e857224fcaf32a34a05183b9d8a7aca25f8f2d10b8a698b8388561a/scipy-1.17.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5194c445d0a1c7a6c1a4a4681b6b7c71baad98ff66d96b949097e7513c9d6742", size = 32724197, upload-time = "2026-01-10T21:25:44.084Z" }, + { url = "https://files.pythonhosted.org/packages/d1/fe/66d73b76d378ba8cc2fe605920c0c75092e3a65ae746e1e767d9d020a75a/scipy-1.17.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9eeb9b5f5997f75507814ed9d298ab23f62cf79f5a3ef90031b1ee2506abdb5b", size = 35009148, upload-time = "2026-01-10T21:25:50.591Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/07dec27d9dc41c18d8c43c69e9e413431d20c53a0339c388bcf72f353c4b/scipy-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40052543f7bbe921df4408f46003d6f01c6af109b9e2c8a66dd1cf6cf57f7d5d", size = 34798766, upload-time = "2026-01-10T21:25:59.41Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/61/0470810c8a093cdacd4ba7504b8a218fd49ca070d79eca23a615f5d9a0b0/scipy-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0cf46c8013fec9d3694dc572f0b54100c28405d55d3e2cb15e2895b25057996e", size = 37405953, upload-time = "2026-01-10T21:26:07.75Z" }, + { url = "https://files.pythonhosted.org/packages/92/ce/672ed546f96d5d41ae78c4b9b02006cedd0b3d6f2bf5bb76ea455c320c28/scipy-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:0937a0b0d8d593a198cededd4c439a0ea216a3f36653901ea1f3e4be949056f8", size = 36328121, upload-time = "2026-01-10T21:26:16.509Z" }, + { url = "https://files.pythonhosted.org/packages/9d/21/38165845392cae67b61843a52c6455d47d0cc2a40dd495c89f4362944654/scipy-1.17.0-cp312-cp312-win_arm64.whl", hash = "sha256:f603d8a5518c7426414d1d8f82e253e454471de682ce5e39c29adb0df1efb86b", size = 24314368, upload-time = "2026-01-10T21:26:23.087Z" }, + { url = "https://files.pythonhosted.org/packages/0c/51/3468fdfd49387ddefee1636f5cf6d03ce603b75205bf439bbf0e62069bfd/scipy-1.17.0-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:65ec32f3d32dfc48c72df4291345dae4f048749bc8d5203ee0a3f347f96c5ce6", size = 31344101, upload-time = "2026-01-10T21:26:30.25Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9a/9406aec58268d437636069419e6977af953d1e246df941d42d3720b7277b/scipy-1.17.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:1f9586a58039d7229ce77b52f8472c972448cded5736eaf102d5658bbac4c269", size = 27950385, upload-time = "2026-01-10T21:26:36.801Z" }, + { url = "https://files.pythonhosted.org/packages/4f/98/e7342709e17afdfd1b26b56ae499ef4939b45a23a00e471dfb5375eea205/scipy-1.17.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:9fad7d3578c877d606b1150135c2639e9de9cecd3705caa37b66862977cc3e72", size = 20122115, upload-time = "2026-01-10T21:26:42.107Z" }, + { url = "https://files.pythonhosted.org/packages/fd/0e/9eeeb5357a64fd157cbe0302c213517c541cc16b8486d82de251f3c68ede/scipy-1.17.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:423ca1f6584fc03936972b5f7c06961670dbba9f234e71676a7c7ccf938a0d61", size = 22442402, upload-time = "2026-01-10T21:26:48.029Z" }, + { url = "https://files.pythonhosted.org/packages/c9/10/be13397a0e434f98e0c79552b2b584ae5bb1c8b2be95db421533bbca5369/scipy-1.17.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe508b5690e9eaaa9467fc047f833af58f1152ae51a0d0aed67aa5801f4dd7d6", size = 32696338, upload-time = "2026-01-10T21:26:55.521Z" }, + { url = "https://files.pythonhosted.org/packages/63/1e/12fbf2a3bb240161651c94bb5cdd0eae5d4e8cc6eaeceb74ab07b12a753d/scipy-1.17.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6680f2dfd4f6182e7d6db161344537da644d1cf85cf293f015c60a17ecf08752", size = 34977201, upload-time = "2026-01-10T21:27:03.501Z" }, + { url = "https://files.pythonhosted.org/packages/19/5b/1a63923e23ccd20bd32156d7dd708af5bbde410daa993aa2500c847ab2d2/scipy-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eec3842ec9ac9de5917899b277428886042a93db0b227ebbe3a333b64ec7643d", size = 34777384, upload-time = "2026-01-10T21:27:11.423Z" }, + { url = "https://files.pythonhosted.org/packages/39/22/b5da95d74edcf81e540e467202a988c50fef41bd2011f46e05f72ba07df6/scipy-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d7425fcafbc09a03731e1bc05581f5fad988e48c6a861f441b7ab729a49a55ea", size = 37379586, upload-time = "2026-01-10T21:27:20.171Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/b6/8ac583d6da79e7b9e520579f03007cb006f063642afd6b2eeb16b890bf93/scipy-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:87b411e42b425b84777718cc41516b8a7e0795abfa8e8e1d573bf0ef014f0812", size = 36287211, upload-time = "2026-01-10T21:28:43.122Z" }, + { url = "https://files.pythonhosted.org/packages/55/fb/7db19e0b3e52f882b420417644ec81dd57eeef1bd1705b6f689d8ff93541/scipy-1.17.0-cp313-cp313-win_arm64.whl", hash = "sha256:357ca001c6e37601066092e7c89cca2f1ce74e2a520ca78d063a6d2201101df2", size = 24312646, upload-time = "2026-01-10T21:28:49.893Z" }, + { url = "https://files.pythonhosted.org/packages/20/b6/7feaa252c21cc7aff335c6c55e1b90ab3e3306da3f048109b8b639b94648/scipy-1.17.0-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:ec0827aa4d36cb79ff1b81de898e948a51ac0b9b1c43e4a372c0508c38c0f9a3", size = 31693194, upload-time = "2026-01-10T21:27:27.454Z" }, + { url = "https://files.pythonhosted.org/packages/76/bb/bbb392005abce039fb7e672cb78ac7d158700e826b0515cab6b5b60c26fb/scipy-1.17.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:819fc26862b4b3c73a60d486dbb919202f3d6d98c87cf20c223511429f2d1a97", size = 28365415, upload-time = "2026-01-10T21:27:34.26Z" }, + { url = "https://files.pythonhosted.org/packages/37/da/9d33196ecc99fba16a409c691ed464a3a283ac454a34a13a3a57c0d66f3a/scipy-1.17.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:363ad4ae2853d88ebcde3ae6ec46ccca903ea9835ee8ba543f12f575e7b07e4e", size = 20537232, upload-time = "2026-01-10T21:27:40.306Z" }, + { url = "https://files.pythonhosted.org/packages/56/9d/f4b184f6ddb28e9a5caea36a6f98e8ecd2a524f9127354087ce780885d83/scipy-1.17.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:979c3a0ff8e5ba254d45d59ebd38cde48fce4f10b5125c680c7a4bfe177aab07", size = 22791051, upload-time = "2026-01-10T21:27:46.539Z" }, + { url = "https://files.pythonhosted.org/packages/9b/9d/025cccdd738a72140efc582b1641d0dd4caf2e86c3fb127568dc80444e6e/scipy-1.17.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:130d12926ae34399d157de777472bf82e9061c60cc081372b3118edacafe1d00", size = 32815098, upload-time = "2026-01-10T21:27:54.389Z" }, + { url = "https://files.pythonhosted.org/packages/48/5f/09b879619f8bca15ce392bfc1894bd9c54377e01d1b3f2f3b595a1b4d945/scipy-1.17.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e886000eb4919eae3a44f035e63f0fd8b651234117e8f6f29bad1cd26e7bc45", size = 35031342, upload-time = "2026-01-10T21:28:03.012Z" }, + { url = "https://files.pythonhosted.org/packages/f2/9a/f0f0a9f0aa079d2f106555b984ff0fbb11a837df280f04f71f056ea9c6e4/scipy-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:13c4096ac6bc31d706018f06a49abe0485f96499deb82066b94d19b02f664209", size = 34893199, upload-time = "2026-01-10T21:28:10.832Z" }, + { url = "https://files.pythonhosted.org/packages/90/b8/4f0f5cf0c5ea4d7548424e6533e6b17d164f34a6e2fb2e43ffebb6697b06/scipy-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cacbaddd91fcffde703934897c5cd2c7cb0371fac195d383f4e1f1c5d3f3bd04", size = 37438061, upload-time = "2026-01-10T21:28:19.684Z" }, + { url = "https://files.pythonhosted.org/packages/f9/cc/2bd59140ed3b2fa2882fb15da0a9cb1b5a6443d67cfd0d98d4cec83a57ec/scipy-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:edce1a1cf66298cccdc48a1bdf8fb10a3bf58e8b58d6c3883dd1530e103f87c0", size = 36328593, upload-time = "2026-01-10T21:28:28.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/1b/c87cc44a0d2c7aaf0f003aef2904c3d097b422a96c7e7c07f5efd9073c1b/scipy-1.17.0-cp313-cp313t-win_arm64.whl", hash = "sha256:30509da9dbec1c2ed8f168b8d8aa853bc6723fede1dbc23c7d43a56f5ab72a67", size = 24625083, upload-time = "2026-01-10T21:28:35.188Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2d/51006cd369b8e7879e1c630999a19d1fbf6f8b5ed3e33374f29dc87e53b3/scipy-1.17.0-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:c17514d11b78be8f7e6331b983a65a7f5ca1fd037b95e27b280921fe5606286a", size = 31346803, upload-time = "2026-01-10T21:28:57.24Z" }, + { url = "https://files.pythonhosted.org/packages/d6/2e/2349458c3ce445f53a6c93d4386b1c4c5c0c540917304c01222ff95ff317/scipy-1.17.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:4e00562e519c09da34c31685f6acc3aa384d4d50604db0f245c14e1b4488bfa2", size = 27967182, upload-time = "2026-01-10T21:29:04.107Z" }, + { url = "https://files.pythonhosted.org/packages/5e/7c/df525fbfa77b878d1cfe625249529514dc02f4fd5f45f0f6295676a76528/scipy-1.17.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:f7df7941d71314e60a481e02d5ebcb3f0185b8d799c70d03d8258f6c80f3d467", size = 20139125, upload-time = "2026-01-10T21:29:10.179Z" }, + { url = "https://files.pythonhosted.org/packages/33/11/fcf9d43a7ed1234d31765ec643b0515a85a30b58eddccc5d5a4d12b5f194/scipy-1.17.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:aabf057c632798832f071a8dde013c2e26284043934f53b00489f1773b33527e", size = 22443554, upload-time = "2026-01-10T21:29:15.888Z" }, + { url = "https://files.pythonhosted.org/packages/80/5c/ea5d239cda2dd3d31399424967a24d556cf409fbea7b5b21412b0fd0a44f/scipy-1.17.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a38c3337e00be6fd8a95b4ed66b5d988bac4ec888fd922c2ea9fe5fb1603dd67", size = 32757834, upload-time = "2026-01-10T21:29:23.406Z" }, + { url = "https://files.pythonhosted.org/packages/b8/7e/8c917cc573310e5dc91cbeead76f1b600d3fb17cf0969db02c9cf92e3cfa/scipy-1.17.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00fb5f8ec8398ad90215008d8b6009c9db9fa924fd4c7d6be307c6f945f9cd73", size = 34995775, upload-time = "2026-01-10T21:29:31.915Z" }, + { url = "https://files.pythonhosted.org/packages/c5/43/176c0c3c07b3f7df324e7cdd933d3e2c4898ca202b090bd5ba122f9fe270/scipy-1.17.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f2a4942b0f5f7c23c7cd641a0ca1955e2ae83dedcff537e3a0259096635e186b", size = 34841240, upload-time = "2026-01-10T21:29:39.995Z" }, + { url = "https://files.pythonhosted.org/packages/44/8c/d1f5f4b491160592e7f084d997de53a8e896a3ac01cd07e59f43ca222744/scipy-1.17.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:dbf133ced83889583156566d2bdf7a07ff89228fe0c0cb727f777de92092ec6b", size = 37394463, upload-time = "2026-01-10T21:29:48.723Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ec/42a6657f8d2d087e750e9a5dde0b481fd135657f09eaf1cf5688bb23c338/scipy-1.17.0-cp314-cp314-win_amd64.whl", hash = "sha256:3625c631a7acd7cfd929e4e31d2582cf00f42fcf06011f59281271746d77e061", size = 37053015, upload-time = "2026-01-10T21:30:51.418Z" }, + { url = "https://files.pythonhosted.org/packages/27/58/6b89a6afd132787d89a362d443a7bddd511b8f41336a1ae47f9e4f000dc4/scipy-1.17.0-cp314-cp314-win_arm64.whl", hash = "sha256:9244608d27eafe02b20558523ba57f15c689357c85bdcfe920b1828750aa26eb", size = 24951312, upload-time = "2026-01-10T21:30:56.771Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/01/f58916b9d9ae0112b86d7c3b10b9e685625ce6e8248df139d0fcb17f7397/scipy-1.17.0-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:2b531f57e09c946f56ad0b4a3b2abee778789097871fc541e267d2eca081cff1", size = 31706502, upload-time = "2026-01-10T21:29:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/59/8e/2912a87f94a7d1f8b38aabc0faf74b82d3b6c9e22be991c49979f0eceed8/scipy-1.17.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:13e861634a2c480bd237deb69333ac79ea1941b94568d4b0efa5db5e263d4fd1", size = 28380854, upload-time = "2026-01-10T21:30:01.554Z" }, + { url = "https://files.pythonhosted.org/packages/bd/1c/874137a52dddab7d5d595c1887089a2125d27d0601fce8c0026a24a92a0b/scipy-1.17.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:eb2651271135154aa24f6481cbae5cc8af1f0dd46e6533fb7b56aa9727b6a232", size = 20552752, upload-time = "2026-01-10T21:30:05.93Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f0/7518d171cb735f6400f4576cf70f756d5b419a07fe1867da34e2c2c9c11b/scipy-1.17.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:c5e8647f60679790c2f5c76be17e2e9247dc6b98ad0d3b065861e082c56e078d", size = 22803972, upload-time = "2026-01-10T21:30:10.651Z" }, + { url = "https://files.pythonhosted.org/packages/7c/74/3498563a2c619e8a3ebb4d75457486c249b19b5b04a30600dfd9af06bea5/scipy-1.17.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5fb10d17e649e1446410895639f3385fd2bf4c3c7dfc9bea937bddcbc3d7b9ba", size = 32829770, upload-time = "2026-01-10T21:30:16.359Z" }, + { url = "https://files.pythonhosted.org/packages/48/d1/7b50cedd8c6c9d6f706b4b36fa8544d829c712a75e370f763b318e9638c1/scipy-1.17.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8547e7c57f932e7354a2319fab613981cde910631979f74c9b542bb167a8b9db", size = 35051093, upload-time = "2026-01-10T21:30:22.987Z" }, + { url = "https://files.pythonhosted.org/packages/e2/82/a2d684dfddb87ba1b3ea325df7c3293496ee9accb3a19abe9429bce94755/scipy-1.17.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33af70d040e8af9d5e7a38b5ed3b772adddd281e3062ff23fec49e49681c38cf", size = 34909905, upload-time = "2026-01-10T21:30:28.704Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5e/e565bd73991d42023eb82bb99e51c5b3d9e2c588ca9d4b3e2cc1d3ca62a6/scipy-1.17.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb55bb97d00f8b7ab95cb64f873eb0bf54d9446264d9f3609130381233483f", size = 37457743, upload-time = "2026-01-10T21:30:34.819Z" }, + { url = "https://files.pythonhosted.org/packages/58/a8/a66a75c3d8f1fb2b83f66007d6455a06a6f6cf5618c3dc35bc9b69dd096e/scipy-1.17.0-cp314-cp314t-win_amd64.whl", hash = "sha256:1ff269abf702f6c7e67a4b7aad981d42871a11b9dd83c58d2d2ea624efbd1088", size = 37098574, upload-time = "2026-01-10T21:30:40.782Z" }, + { url = "https://files.pythonhosted.org/packages/56/a5/df8f46ef7da168f1bc52cd86e09a9de5c6f19cc1da04454d51b7d4f43408/scipy-1.17.0-cp314-cp314t-win_arm64.whl", hash = "sha256:031121914e295d9791319a1875444d55079885bbae5bdc9c5e0f2ee5f09d34ff", size = 25246266, upload-time = "2026-01-10T21:30:45.923Z" }, ] [[package]] @@ -5996,7 +5998,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "matplotlib", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "numpy", version = "2.2.6", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < 
'3.11' and sys_platform == 'linux') or (python_full_version < '3.11' and sys_platform == 'win32')" }, - { name = "numpy", version = "2.4.0", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, + { name = "numpy", version = "2.4.1", source = { registry = "https://pypi.org/simple" }, marker = "(python_full_version >= '3.11' and sys_platform == 'darwin') or (python_full_version >= '3.11' and sys_platform == 'linux') or (python_full_version >= '3.11' and sys_platform == 'win32')" }, { name = "pandas", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696, upload-time = "2024-01-25T13:21:52.551Z" } @@ -6490,51 +6492,56 @@ wheels = [ [[package]] name = "tomli" -version = "2.3.0" +version = "2.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, - { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, - { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, - { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = "2025-10-08T22:01:03.836Z" }, - { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, - { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, - { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, - { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, - { url = "https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, - { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, - { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, - { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, - { url = "https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, - { url = 
"https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, - { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, - { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, - { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, - { url = "https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, - { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, - { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, - { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, - { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, - { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, - { url 
= "https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, - { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, - { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, - { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, - { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, - { url = "https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, - { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, - { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, - { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, - { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, - { url = 
"https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, - { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = 
"https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] [[package]] @@ -6654,28 +6661,28 @@ wheels = [ [[package]] name = "uv" -version = "0.9.23" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/3b/21fe8951468c355fa4bfc7fc5d1c97f4bf414d29bf9308eb7781083ed084/uv-0.9.23.tar.gz", hash = "sha256:9bfa6816d76119ad3e6a97c9d1cb2022a166af31cd442bd4e42f674c8f9ab87d", size = 3852934, upload-time = "2026-01-09T19:46:06.417Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/b9/83bd7a85e0187ebbf5ae7e5dbb6c20581579fac5062073fc191ba7ae90e2/uv-0.9.23-py3-none-linux_armv6l.whl", hash = "sha256:1822c0e8bd15d2d377aa06360885262ad569547611078e2d83cc9a3cb57b72a9", size = 21407564, upload-time = "2026-01-09T19:45:15.468Z" }, - { url = "https://files.pythonhosted.org/packages/29/99/e430e0cee02815eafb22ad2f35ffa4ade975b0dc4819eb804bc6efa2d2aa/uv-0.9.23-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f5ca37e98ed8a8f7e8b6bb94758981f92e5e996cc3c32b3f718e82405fc4726e", size = 20568405, upload-time = "2026-01-09T19:45:25.397Z" }, - { url = "https://files.pythonhosted.org/packages/6f/11/cea89a5edbb6af28ac64114dc25abdc2d9ea81b5673207e499bef839cfeb/uv-0.9.23-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4406c03091586fe7a00d7b0d03f5f6b5f73b5a62fd555ccb9ddd5346626dc43", size = 19014722, upload-time = "2026-01-09T19:45:52.568Z" }, - { url = "https://files.pythonhosted.org/packages/a8/26/d38ff6987aa971b8fbee036a356083bd9fd6605ac20cebc1fcc6dbff38b6/uv-0.9.23-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:afbcb83949b180236024b546ca93a0068336e8b3ccebb28d2721a14bb933dd8c", size = 20893728, upload-time = "2026-01-09T19:45:29.075Z" }, - { url = "https://files.pythonhosted.org/packages/00/68/0eb2740ec2357d5e7b44eb5d33a02e4f8d74fc41ee6ff4b93f7a77f726b8/uv-0.9.23-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4556bf96b54a5673b88178054c1c8d6e39d1f2642590677c777b4d121fa96e1", size = 21032944, upload-time = "2026-01-09T19:45:04.401Z" }, - { url = 
"https://files.pythonhosted.org/packages/d2/69/11418b991ea3fc4d555d352e27204e8e2ce67cc6b64a7708f3821693de37/uv-0.9.23-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c77f9a44d4d7333ae264c3810e52882b0877d90c66af43b42d73e38a1158fde1", size = 21952078, upload-time = "2026-01-09T19:46:03.957Z" }, - { url = "https://files.pythonhosted.org/packages/86/35/c7c7e1b05efc2b6122f14de18cd0691a64c8a30fd44a0ec0bdc25887b156/uv-0.9.23-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f5a9fe2b472628c239d56765786b79b894d37ace334a0868c158795fa0c9d454", size = 23645955, upload-time = "2026-01-09T19:45:11.959Z" }, - { url = "https://files.pythonhosted.org/packages/5f/c1/51a60b7c0172b5d4f29e605fe54c5fe43db16d055285cd3414d2ad1cee5f/uv-0.9.23-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:940c4f88dadb91910f2643f71a83ec7700359a43e77a1e08731acbb70654f15b", size = 23231613, upload-time = "2026-01-09T19:45:36.141Z" }, - { url = "https://files.pythonhosted.org/packages/5e/75/61da4afd9b838fbfad24f5abf826e1e73b047ae975c0b4c52d11b129015f/uv-0.9.23-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af33450dd4d4b1544f6872725e666177a77751b2c7490998688210743a5b44ec", size = 22277438, upload-time = "2026-01-09T19:45:21.978Z" }, - { url = "https://files.pythonhosted.org/packages/ea/4a/9bba41dfe207c4083de036fdf08d4db779faf1bc05b00e86cb06a5d4d97a/uv-0.9.23-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:645ef5a067340320722b84c2abdd21587bda15be39e7ebf88ae5ace2be6fb192", size = 22326761, upload-time = "2026-01-09T19:45:56.143Z" }, - { url = "https://files.pythonhosted.org/packages/a8/97/111edb3b979f5a4433e040fad7033853408d8d0d8d7f98a9976984f77fb9/uv-0.9.23-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:4c5e4af9ec5450d75807b13d70bd7f57a19bad22dc8a2ddf313bddad5f672ea8", size = 21008087, upload-time = "2026-01-09T19:45:00.787Z" }, - { url = "https://files.pythonhosted.org/packages/53/ed/801df59697a18c5dda3c88b9323a06543bb1c5d15e46012d5c579afd0e72/uv-0.9.23-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5d97f91d49c95561dc16b2290f8ec34f5f7d22f8a81935b0c798ec053a164bdf", size = 22162896, upload-time = "2026-01-09T19:45:32.642Z" }, - { url = "https://files.pythonhosted.org/packages/ab/fe/413233c15f1e7dece47fabf5809f98f851a6c1dd72ae717bf26e86d5ceb7/uv-0.9.23-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:5d7e9a9e54510fdd3ad0ff94eda1bb13eb1fd3c33905e45228195b265c449b0f", size = 20991335, upload-time = "2026-01-09T19:45:42.065Z" }, - { url = "https://files.pythonhosted.org/packages/ec/b8/79ce50366ff480879e6052c906c6bd48a70ea47f691ef9e7f13686b4d361/uv-0.9.23-py3-none-musllinux_1_1_i686.whl", hash = "sha256:e039205eef9cb91ff925da4f7d7507cf630c24b7fd043352a33d19cc9790f9f8", size = 21407799, upload-time = "2026-01-09T19:45:08.36Z" }, - { url = "https://files.pythonhosted.org/packages/da/5e/1ceeef97798d61497f7fe064eb7584cd26a24ca89ec95bb3a797ecad648e/uv-0.9.23-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6391fab501516966dca12b14d6008201df47734c0d4b949602af9fc2e110e7e7", size = 22537456, upload-time = "2026-01-09T19:45:49.242Z" }, - { url = "https://files.pythonhosted.org/packages/17/a4/cafa5146cbedd90afc52b39edf591bac266e1a4f576180eb3cdb76fe6615/uv-0.9.23-py3-none-win32.whl", hash = "sha256:6de8501557603a672efb4ff7429e89e2d45a32ecd7f6b017544335b22da12423", size = 20213704, upload-time = "2026-01-09T19:45:45.736Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/9e/738c62166050f534d6620c3dc26066a4419b79e4aeac988b532d14169afd/uv-0.9.23-py3-none-win_amd64.whl", hash = "sha256:6091a85084ec882db9ebf84503a405a2e20da73fcbb7ad3697668e9f198cb167", size = 22292514, upload-time = "2026-01-09T19:46:00.123Z" }, - { url = "https://files.pythonhosted.org/packages/14/84/830eb6bb16fc1320093e90be8ce06a09f85a78dc08c0e912a318cd9ad9c5/uv-0.9.23-py3-none-win_arm64.whl", hash = "sha256:097be29902bb8e58cde8f02f09e0679ac04e9736605d6d8caa2ecb0d0aadb49c", size = 20603593, upload-time = "2026-01-09T19:45:18.768Z" }, +version = "0.9.24" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/7f/6692596de7775b3059a55539aed2eec16a0642a2d6d3510baa5878287ce4/uv-0.9.24.tar.gz", hash = "sha256:d59d31c25fc530c68db9164174efac511a25fc882cec49cd48f75a18e7ebd6d5", size = 3852673, upload-time = "2026-01-09T22:34:31.635Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/51/10bb9541c40a5b4672527c357997a30fdf38b75e7bbaad0c37ed70889efa/uv-0.9.24-py3-none-linux_armv6l.whl", hash = "sha256:75a000f529ec92235b10fb5e16ca41f23f46c643308fd6c5b0d7b73ca056c5b9", size = 21395664, upload-time = "2026-01-09T22:34:05.887Z" }, + { url = "https://files.pythonhosted.org/packages/ec/dd/d7df524cb764ebc652e0c8bf9abe55fc34391adc2e4ab1d47375222b38a9/uv-0.9.24-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:207c8a2d4c4d55589feb63b4be74f6ff6ab92fa81b14a6515007ccec5a868ae0", size = 20547988, upload-time = "2026-01-09T22:34:16.21Z" }, + { url = "https://files.pythonhosted.org/packages/49/e4/7ca5e7eaed4b2b9d407aa5aeeb8f71cace7db77f30a63139bbbfdfe4770c/uv-0.9.24-py3-none-macosx_11_0_arm64.whl", hash = "sha256:44c0b8a78724e4cfa8e9c0266023c70fc792d0b39a5da17f5f847af2b530796b", size = 19033208, upload-time = "2026-01-09T22:33:50.91Z" }, + { url = "https://files.pythonhosted.org/packages/27/05/b7bab99541056537747bfdc55fdc97a4ba998e2b53cf855411ef176c412b/uv-0.9.24-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:841ede01d6dcf1676a21dce05f3647ba171c1d92768a03e8b8b6b7354b34a6d2", size = 20872212, upload-time = "2026-01-09T22:33:58.007Z" }, + { url = "https://files.pythonhosted.org/packages/d3/93/3a69cf481175766ee6018afb281666de12ccc04367d20a41dc070be8b422/uv-0.9.24-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:69531d9a8772afb2dff68fef2469f666e4f8a0132b2109e36541c423415835da", size = 21017966, upload-time = "2026-01-09T22:34:29.354Z" }, + { url = "https://files.pythonhosted.org/packages/17/40/7aec2d428e57a3ec992efc49bbc71e4a0ceece5a726751c661ddc3f41315/uv-0.9.24-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6720c9939cca7daff3cccc35dd896bbe139d7d463c62cba8dbbc474ff8eb93d1", size = 21943358, upload-time = "2026-01-09T22:34:08.63Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f4/2aa5b275aa8e5edb659036e94bae13ae294377384cf2a93a8d742a38050f/uv-0.9.24-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d7d1333d9c21088c89cb284ef29fdf48dc2015fe993174a823a3e7c991db90f9", size = 23672949, upload-time = "2026-01-09T22:34:03.113Z" }, + { url = "https://files.pythonhosted.org/packages/8e/24/2589bed4b39394c799472f841e0580318a8b7e69ef103a0ab50cf1c39dff/uv-0.9.24-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b610d89d6025000d08cd9bd458c6e264003a0ecfdaa8e4eba28938130cd1837", size = 23270210, upload-time = "2026-01-09T22:34:13.94Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/3a/034494492a1ad1f95371c6fd735e4b7d180b8c1712c88b0f32a34d6352fd/uv-0.9.24-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38c59e18fe5fa42f7baeb4f08c94914cee6d87ff8faa6fc95c994dbc0de26c90", size = 22282247, upload-time = "2026-01-09T22:33:53.362Z" }, + { url = "https://files.pythonhosted.org/packages/be/0e/d8ab2c4fa6c9410a8a37fa6608d460b0126cee2efed9eecf516cdec72a1a/uv-0.9.24-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009cc82cdfc48add6ec13a0c4ffbb788ae2cab53573b4218069ca626721a404b", size = 22348801, upload-time = "2026-01-09T22:34:00.46Z" }, + { url = "https://files.pythonhosted.org/packages/50/fa/7217764e4936d6fda1944d956452bf94f790ae8a02cb3e5aa496d23fcb25/uv-0.9.24-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1914d33e526167dc202ec4a59119c68467b31f7c71dcf8b1077571d091ca3e7c", size = 21000825, upload-time = "2026-01-09T22:34:21.811Z" }, + { url = "https://files.pythonhosted.org/packages/94/8f/533db58a36895142b0c11eedf8bfe11c4724fb37deaa417bfb0c689d40b8/uv-0.9.24-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:aafe7dd9b633672054cf27f1a8e4127506324631f1af5edd051728f4f8085351", size = 22149066, upload-time = "2026-01-09T22:33:45.722Z" }, + { url = "https://files.pythonhosted.org/packages/cf/c7/e6eccd96341a548f0405bffdf55e7f30b5c0757cd1b8f7578e0972a66002/uv-0.9.24-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:63a0a46693098cf8446e41bd5d9ce7d3bc9b775a63fe0c8405ab6ee328424d46", size = 20993489, upload-time = "2026-01-09T22:34:27.007Z" }, + { url = "https://files.pythonhosted.org/packages/46/07/32d852d2d40c003b52601c44202c9d9e655c485fae5d84e42f326814b0be/uv-0.9.24-py3-none-musllinux_1_1_i686.whl", hash = "sha256:15d3955bfb03a7b78aaf5afb639cedefdf0fc35ff844c92e3fe6e8700b94b84f", size = 21400775, upload-time = "2026-01-09T22:34:24.278Z" }, + { url = "https://files.pythonhosted.org/packages/b0/58/f8e94226126011ba2e2e9d59c6190dc7fe9e61fa7ef4ca720d7226c1482b/uv-0.9.24-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:488a07e59fb417bf86de5630197223b7a0223229e626afc124c26827db78cff8", size = 22554194, upload-time = "2026-01-09T22:34:18.504Z" }, + { url = "https://files.pythonhosted.org/packages/da/8e/b540c304039a6561ba8e9a673009cfe1451f989d2269fe40690901ddb233/uv-0.9.24-py3-none-win32.whl", hash = "sha256:68a3186074c03876ee06b68730d5ff69a430296760d917ebbbb8e3fb54fb4091", size = 20203184, upload-time = "2026-01-09T22:34:11.02Z" }, + { url = "https://files.pythonhosted.org/packages/16/59/dba7c5feec1f694183578435eaae0d759b8c459c5e4f91237a166841a116/uv-0.9.24-py3-none-win_amd64.whl", hash = "sha256:8cd626306b415491f839b1a9100da6795c82c44d4cf278dd7ace7a774af89df4", size = 22294050, upload-time = "2026-01-09T22:33:48.228Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ef/e58fb288bafb5a8b5d4994e73fa6e062e408680e5a20d0427d5f4f66d8b1/uv-0.9.24-py3-none-win_arm64.whl", hash = "sha256:8d3c0fec7aa17f936a5b258816e856647b21f978a81bcfb2dc8caf2892a4965e", size = 20620004, upload-time = "2026-01-09T22:33:55.62Z" }, ] [[package]]