diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore new file mode 100644 index 0000000000..4083037423 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore @@ -0,0 +1 @@ +local diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py new file mode 100644 index 0000000000..f12d1cd92f --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py @@ -0,0 +1,5 @@ +from ._agent import HatchetAgent +from ._mcp_server import HatchetMCPServer +from ._model import HatchetModel + +__all__ = ['HatchetAgent', 'HatchetModel', 'HatchetMCPServer'] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py new file mode 100644 index 0000000000..ea626614f2 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -0,0 +1,671 @@ +from __future__ import annotations + +from collections.abc import AsyncIterable, AsyncIterator, Iterator, Sequence +from contextlib import AbstractAsyncContextManager, asynccontextmanager, contextmanager +from typing import Any, Generic, overload +from uuid import uuid4 + +from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions +from hatchet_sdk.runnables.workflow import BaseWorkflow +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter +from typing_extensions import Never + +from pydantic_ai import _utils, messages as _messages, models, usage as _usage +from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, WrapperAgent +from pydantic_ai.agent.abstract import Instructions, RunOutputDataT +from pydantic_ai.exceptions import UserError +from pydantic_ai.messages import AgentStreamEvent +from pydantic_ai.models import Model +from pydantic_ai.output import OutputDataT, OutputSpec +from pydantic_ai.result import StreamedRunResult +from pydantic_ai.settings import ModelSettings +from pydantic_ai.tools import ( + AgentDepsT, + DeferredToolResults, + RunContext, + Tool, + ToolFuncEither, +) +from pydantic_ai.toolsets import AbstractToolset + +from ._model import HatchetModel +from ._run_context import HatchetRunContext +from ._utils import TaskConfig + + +class RunAgentInput(BaseModel, Generic[RunOutputDataT, AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + + user_prompt: str | Sequence[_messages.UserContent] | None = None + output_type: OutputSpec[RunOutputDataT] | None = None + message_history: list[_messages.ModelMessage] | None = None + deferred_tool_results: DeferredToolResults | None = None + model: models.Model | models.KnownModelName | str | None = None + deps: AgentDepsT + model_settings: ModelSettings | None = None + usage_limits: _usage.UsageLimits | None = None + usage: _usage.RunUsage | None = None + infer_name: bool = True + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None + deprecated_kwargs: dict[str, Any] = Field(default_factory=dict) + + +class EventStreamHandlerInput(BaseModel, Generic[AgentDepsT]): + event: _messages.AgentStreamEvent + serialized_run_context: Any + deps: AgentDepsT + + +class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): + def __init__( + self, + wrapped: AbstractAgent[AgentDepsT, OutputDataT], + hatchet: Hatchet, + *, + name: str | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + mcp_task_config: 
TaskConfig | None = None,
+        model_task_config: TaskConfig | None = None,
+        run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT],
+    ):
+        """Wrap an agent to make it durable with Hatchet, automatically offloading model requests, tool calls, and MCP server communication to Hatchet tasks.
+
+        After wrapping, the original agent can still be used as normal outside of the Hatchet workflow.
+
+        Args:
+            wrapped: The agent to wrap.
+            hatchet: The Hatchet instance to use for creating tasks.
+            name: Optional unique agent name to use in the Hatchet tasks' names. If not provided, the agent's `name` will be used.
+            event_stream_handler: Optional event stream handler to use for this agent.
+            mcp_task_config: The base Hatchet task config to use for toolset tasks, including MCP server communication. If no config is provided, the default settings are used.
+            model_task_config: The Hatchet task config to use for model request tasks. If no config is provided, the default settings are used.
+            run_context_type: The `HatchetRunContext` (sub)class that's used to serialize and deserialize the run context.
+        """
+        super().__init__(wrapped)
+
+        self._name = name or wrapped.name
+        self._event_stream_handler = event_stream_handler
+        self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type
+
+        self._hatchet = hatchet
+
+        if not self._name:
+            raise UserError(
+                "An agent needs to have a unique `name` in order to be used with Hatchet. The name will be used to identify the agent's workflows and tasks."
+            )
+
+        if not isinstance(wrapped.model, Model):
+            raise UserError(
+                'An agent needs to have a `model` in order to be used with Hatchet; it cannot be set at agent run time.'
+            )
+
+        self._model = HatchetModel(
+            wrapped.model,
+            task_name_prefix=self._name,
+            task_config=model_task_config or TaskConfig(),
+            hatchet=self._hatchet,
+            deps_type=self.deps_type,
+            run_context_type=self.run_context_type,
+            event_stream_handler=self.event_stream_handler,
+        )
+        hatchet_agent_name = self._name
+
+        def hatchetize_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]:
+            from ._toolset import hatchetize_toolset
+
+            return hatchetize_toolset(
+                toolset,
+                hatchet=hatchet,
+                task_name_prefix=hatchet_agent_name,
+                task_config=mcp_task_config or TaskConfig(),
+                deps_type=self.deps_type,
+                run_context_type=run_context_type,
+            )
+
+        self._toolsets = [toolset.visit_and_replace(hatchetize_toolset) for toolset in wrapped.toolsets]
+
+        @hatchet.durable_task(name=f'{self._name}.run', input_validator=RunAgentInput[Any, Any])
+        async def wrapped_run_workflow(
+            input: RunAgentInput[RunOutputDataT, AgentDepsT],
+            _ctx: DurableContext,
+        ) -> AgentRunResult[Any]:
+            with self._hatchet_overrides():
+                return await super(WrapperAgent, self).run(
+                    input.user_prompt,
+                    output_type=input.output_type,
+                    message_history=input.message_history,
+                    deferred_tool_results=input.deferred_tool_results,
+                    model=input.model,
+                    deps=input.deps,
+                    model_settings=input.model_settings,
+                    usage_limits=input.usage_limits,
+                    usage=input.usage,
+                    infer_name=input.infer_name,
+                    toolsets=input.toolsets,
+                    **input.deprecated_kwargs,
+                )
+
+        self.hatchet_wrapped_run_workflow = wrapped_run_workflow
+
+        @hatchet.durable_task(name=f'{self._name}.run_stream', input_validator=RunAgentInput[Any, Any])
+        async def wrapped_run_stream_workflow(
+            input: RunAgentInput[RunOutputDataT, AgentDepsT],
+            hctx: DurableContext,
+        ) -> AgentRunResult[Any]:
+            async def event_stream_handler(
+                ctx: RunContext[AgentDepsT], events: 
AsyncIterable[AgentStreamEvent] + ) -> None: + async for event in events: + b = TypeAdapter[AgentStreamEvent](AgentStreamEvent).dump_json(event) + await hctx.aio_put_stream(b) + + return await wrapped.run( + input.user_prompt, + output_type=input.output_type, + message_history=input.message_history, + deferred_tool_results=input.deferred_tool_results, + model=self._model, + deps=input.deps, + model_settings=input.model_settings, + usage_limits=input.usage_limits, + usage=input.usage, + infer_name=input.infer_name, + toolsets=self._toolsets, + event_stream_handler=event_stream_handler, + **input.deprecated_kwargs, + ) + + self.hatchet_wrapped_run_stream_workflow = wrapped_run_stream_workflow + + @property + def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: + return self._event_stream_handler or super().event_stream_handler + + @property + def name(self) -> str | None: + return self._name + + @name.setter + def name(self, value: str | None) -> None: # pragma: no cover + raise UserError( + 'The agent name cannot be changed after creation. If you need to change the name, create a new agent.' + ) + + @property + def model(self) -> Model: + return self._model + + @property + def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: + with self._hatchet_overrides(): + return super().toolsets + + @contextmanager + def _hatchet_overrides(self) -> Iterator[None]: + with super().override(model=self._model, toolsets=self._toolsets, tools=[]): + yield + + @property + def workflows(self) -> list[BaseWorkflow[Any]]: + workflows: list[BaseWorkflow[Any]] = [ + self.hatchet_wrapped_run_workflow, + self.hatchet_wrapped_run_stream_workflow, + self._model.hatchet_wrapped_request_task, + self._model.hatchet_wrapped_request_stream_task, + ] + + for toolset in self._toolsets: + from ._toolset import HatchetWrapperToolset + + if isinstance(toolset, HatchetWrapperToolset): + workflows.extend(toolset.hatchet_tasks) + + return workflows + + @overload + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[OutputDataT]: ... + + @overload + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[RunOutputDataT]: ... 
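+    # The overloads above only narrow the static return type based on `output_type`;
+    # the implementation below forwards all arguments to the durable `{name}.run`
+    # Hatchet workflow and validates dict payloads back into an `AgentRunResult`.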
+ + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AgentRunResult[Any]: + """Run the agent with a user prompt in async mode.""" + agent_run_id = uuid4() + + result = await self.hatchet_wrapped_run_workflow.aio_run( + RunAgentInput[RunOutputDataT, AgentDepsT]( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + deprecated_kwargs=_deprecated_kwargs, + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(agent_run_id), + } + ), + ) + + if isinstance(result, dict): + return TypeAdapter(AgentRunResult[Any]).validate_python(result) + + return result + + @overload + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[OutputDataT]: ... + + @overload + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[RunOutputDataT]: ... 
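+    # run_sync() mirrors run(), but triggers the durable workflow with the blocking
+    # `.run()` call on the Hatchet workflow instead of `.aio_run()`.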
+ + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AgentRunResult[Any]: + """Run the agent with a user prompt in sync mode.""" + agent_run_id = uuid4() + + result = self.hatchet_wrapped_run_workflow.run( + RunAgentInput[RunOutputDataT, AgentDepsT]( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + deprecated_kwargs=_deprecated_kwargs, + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(agent_run_id), + } + ), + ) + + if isinstance(result, dict): + return TypeAdapter(AgentRunResult[Any]).validate_python(result) + + return result + + @overload + def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AbstractAsyncContextManager[StreamedRunResult[AgentDepsT, OutputDataT]]: ... + + @overload + def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AbstractAsyncContextManager[StreamedRunResult[AgentDepsT, RunOutputDataT]]: ... 
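+    # run_stream() below raises inside a Hatchet task run; outside one, it executes
+    # the wrapped agent directly with the Hatchet model and toolsets applied.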
+ + @asynccontextmanager + async def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AsyncIterator[StreamedRunResult[AgentDepsT, Any]]: + """Run the agent with a user prompt in async mode, returning a streamed response. + + Example: + ```python + from pydantic_ai import Agent + + agent = Agent('openai:gpt-4o') + + async def main(): + async with agent.run_stream('What is the capital of the UK?') as response: + print(await response.get_output()) + #> The capital of the UK is London. + ``` + + Args: + user_prompt: User input to start/continue the conversation. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. + message_history: History of the conversation so far. + deferred_tool_results: Optional results for deferred tool calls in the message history. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + toolsets: Optional additional toolsets for this run. + event_stream_handler: Optional event stream handler to use for this run. It will receive all the events up until the final result is found, which you can then read or stream from inside the context manager. + + Returns: + The result of the run. + """ + if self._hatchet.is_in_task_run: + raise UserError( + '`agent.run_stream()` cannot currently be used inside a Hatchet workflow. ' + 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. ' + 'Please file an issue if this is not sufficient for your use case.' 
+ ) + + with self._hatchet_overrides(): + async with super().run_stream( + user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + **_deprecated_kwargs, + ) as result: + yield result + + @overload + def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AbstractAsyncContextManager[AgentRun[AgentDepsT, OutputDataT]]: ... + + @overload + def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AbstractAsyncContextManager[AgentRun[AgentDepsT, RunOutputDataT]]: ... + + @asynccontextmanager + async def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AsyncIterator[AgentRun[AgentDepsT, Any]]: + """A contextmanager which can be used to iterate over the agent graph's nodes as they are executed. + + This method builds an internal agent graph (using system prompts, tools and output schemas) and then returns an + `AgentRun` object. The `AgentRun` can be used to async-iterate over the nodes of the graph as they are + executed. This is the API to use if you want to consume the outputs coming from each LLM model response, or the + stream of events coming from the execution of tools. + + The `AgentRun` also provides methods to access the full message history, new messages, and usage statistics, + and the final result of the run once it has completed. + + For more details, see the documentation of `AgentRun`. 
+ + Example: + ```python + from pydantic_ai import Agent + + agent = Agent('openai:gpt-4o') + + async def main(): + nodes = [] + async with agent.iter('What is the capital of France?') as agent_run: + async for node in agent_run: + nodes.append(node) + print(nodes) + ''' + [ + UserPromptNode( + user_prompt='What is the capital of France?', + instructions_functions=[], + system_prompts=(), + system_prompt_functions=[], + system_prompt_dynamic_functions={}, + ), + ModelRequestNode( + request=ModelRequest( + parts=[ + UserPromptPart( + content='What is the capital of France?', + timestamp=datetime.datetime(...), + ) + ] + ) + ), + CallToolsNode( + model_response=ModelResponse( + parts=[TextPart(content='The capital of France is Paris.')], + usage=RequestUsage(input_tokens=56, output_tokens=7), + model_name='gpt-4o', + timestamp=datetime.datetime(...), + ) + ), + End(data=FinalResult(output='The capital of France is Paris.')), + ] + ''' + print(agent_run.result.output) + #> The capital of France is Paris. + ``` + + Args: + user_prompt: User input to start/continue the conversation. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. + message_history: History of the conversation so far. + deferred_tool_results: Optional results for deferred tool calls in the message history. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + toolsets: Optional additional toolsets for this run. + + Returns: + The result of the run. + """ + async with super().iter( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + **_deprecated_kwargs, + ) as run: + yield run + + @contextmanager + def override( + self, + *, + deps: AgentDepsT | _utils.Unset = _utils.UNSET, + model: models.Model | models.KnownModelName | str | _utils.Unset = _utils.UNSET, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | _utils.Unset = _utils.UNSET, + tools: Sequence[Tool[AgentDepsT] | ToolFuncEither[AgentDepsT, ...]] | _utils.Unset = _utils.UNSET, + instructions: Instructions[AgentDepsT] | _utils.Unset = _utils.UNSET, + ) -> Iterator[None]: + """Context manager to temporarily override agent dependencies, model, toolsets, or tools. + + This is particularly useful when testing. + You can find an example of this [here](../testing.md#overriding-model-via-pytest-fixtures). + + Args: + deps: The dependencies to use instead of the dependencies passed to the agent run. + model: The model to use instead of the model passed to the agent run. + toolsets: The toolsets to use instead of the toolsets passed to the agent constructor and agent run. + tools: The tools to use instead of the tools registered with the agent. + instructions: The instructions to use instead of the instructions registered with the agent. 
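+
+        Example (a minimal sketch; assumes an existing `HatchetAgent` named `hatchet_agent`, and uses pydantic_ai's `TestModel`):
+
+            ```python
+            from pydantic_ai.models.test import TestModel
+
+            with hatchet_agent.override(model=TestModel()):
+                ...  # runs in this block use TestModel instead of the wrapped model
+            ```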
+ """ + with super().override(deps=deps, model=model, toolsets=toolsets, tools=tools): + yield diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py new file mode 100644 index 0000000000..ede2ffafbd --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -0,0 +1,114 @@ +from typing import Any + +from hatchet_sdk import Context, Hatchet +from hatchet_sdk.runnables.workflow import Standalone +from pydantic import BaseModel, ConfigDict + +from pydantic_ai.exceptions import UserError +from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.toolsets import FunctionToolset, ToolsetTool + +from ._mcp_server import CallToolInput +from ._run_context import HatchetRunContext +from ._toolset import HatchetWrapperToolset +from ._utils import TaskConfig + + +class ToolOutput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + result: Any + + +class HatchetFunctionToolset(HatchetWrapperToolset[AgentDepsT]): + """A wrapper for FunctionToolset that integrates with Hatchet, turning tool calls into Hatchet tasks.""" + + def __init__( + self, + wrapped: FunctionToolset[AgentDepsT], + *, + hatchet: Hatchet, + task_name_prefix: str, + task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], + ): + super().__init__(wrapped) + self._task_config = task_config + self._task_name_prefix = task_name_prefix + self._hatchet = hatchet + self._tool_tasks: dict[str, Standalone[CallToolInput[AgentDepsT], ToolOutput]] = {} + self.run_context_type = run_context_type + + for tool_name in wrapped.tools.keys(): + task_name = f'{task_name_prefix}__function_tool__{tool_name}' + + def make_tool_task(current_tool_name: str): + @hatchet.task( + name=task_name, + description=self._task_config.description, + input_validator=CallToolInput[AgentDepsT], + version=self._task_config.version, + sticky=self._task_config.sticky, + default_priority=self._task_config.default_priority, + concurrency=self._task_config.concurrency, + schedule_timeout=self._task_config.schedule_timeout, + execution_timeout=self._task_config.execution_timeout, + retries=self._task_config.retries, + rate_limits=self._task_config.rate_limits, + desired_worker_labels=self._task_config.desired_worker_labels, + backoff_factor=self._task_config.backoff_factor, + backoff_max_seconds=self._task_config.backoff_max_seconds, + default_filters=self._task_config.default_filters, + ) + async def tool_task( + input: CallToolInput[AgentDepsT], + _ctx: Context, + ) -> ToolOutput: + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, deps=input.deps + ) + tool = (await wrapped.get_tools(run_context))[current_tool_name] + + result = await super(HatchetFunctionToolset, self).call_tool( + current_tool_name, input.tool_args, run_context, tool + ) + + return ToolOutput(result=result) + + return tool_task + + self._tool_tasks[tool_name] = make_tool_task(tool_name) + + @property + def hatchet_tasks(self) -> list[Standalone[Any, Any]]: + """Return the list of Hatchet tasks for this toolset.""" + return list(self._tool_tasks.values()) + + async def call_tool( + self, + name: str, + tool_args: dict[str, Any], + ctx: RunContext[AgentDepsT], + tool: ToolsetTool[AgentDepsT], + ) -> Any: + if name not in self._tool_tasks: + raise UserError( + f'Tool {name!r} not found in toolset {self.id!r}. 
' + 'Removing or renaming tools during an agent run is not supported with Hatchet.' + ) + + tool_task: Standalone[CallToolInput[AgentDepsT], ToolOutput] = self._tool_tasks[name] + serialized_run_context = self.run_context_type.serialize_run_context(ctx) + + output = await tool_task.aio_run( + CallToolInput( + name=name, + tool_args=tool_args, + tool_def=tool.tool_def, + serialized_run_context=serialized_run_context, + deps=ctx.deps, + ) + ) + + return output.result diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py new file mode 100644 index 0000000000..9306e8a6b4 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -0,0 +1,172 @@ +from abc import ABC +from typing import TYPE_CHECKING, Any, Generic, TypeVar + +from hatchet_sdk import Context, Hatchet +from hatchet_sdk.runnables.workflow import Standalone +from pydantic import BaseModel, ConfigDict + +from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.toolsets.abstract import ( + ToolDefinition, + ToolsetTool, +) + +from ._run_context import HatchetRunContext, SerializedHatchetRunContext +from ._toolset import HatchetWrapperToolset +from ._utils import TaskConfig + +if TYPE_CHECKING: + from pydantic_ai.mcp import MCPServer, ToolResult + +T = TypeVar('T') + + +class GetToolsInput(BaseModel, Generic[AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + + serialized_run_context: SerializedHatchetRunContext + deps: AgentDepsT + + +class CallToolInput(BaseModel, Generic[AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + + name: str + tool_args: dict[str, Any] + tool_def: ToolDefinition + + serialized_run_context: SerializedHatchetRunContext + deps: AgentDepsT + + +class CallToolOutput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + result: 'ToolResult' + + +class HatchetMCPServer(HatchetWrapperToolset[AgentDepsT], ABC): + """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" + + def __init__( + self, + wrapped: 'MCPServer', + *, + hatchet: Hatchet, + task_name_prefix: str, + task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], + ): + super().__init__(wrapped) + self._task_config = task_config + self._task_name_prefix = task_name_prefix + self._hatchet = hatchet + id_suffix = f'__{wrapped.id}' if wrapped.id else '' + self._name = f'{task_name_prefix}__mcp_server{id_suffix}' + self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type + + @hatchet.task( + name=f'{self._name}.get_tools', + description=self._task_config.description, + input_validator=GetToolsInput[AgentDepsT], + version=self._task_config.version, + sticky=self._task_config.sticky, + default_priority=self._task_config.default_priority, + concurrency=self._task_config.concurrency, + schedule_timeout=self._task_config.schedule_timeout, + execution_timeout=self._task_config.execution_timeout, + retries=self._task_config.retries, + rate_limits=self._task_config.rate_limits, + desired_worker_labels=self._task_config.desired_worker_labels, + backoff_factor=self._task_config.backoff_factor, + backoff_max_seconds=self._task_config.backoff_max_seconds, + default_filters=self._task_config.default_filters, + ) + async def wrapped_get_tools_task( + input: GetToolsInput[AgentDepsT], + _ctx: Context, + ) -> dict[str, 
ToolDefinition]:
+            run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps)
+
+            # ToolsetTool is not serializable as it holds a SchemaValidator (which is also the same for every MCP tool so unnecessary to pass along the wire every time),
+            # so we just return the ToolDefinitions and wrap them in ToolsetTool outside of the task.
+            tools = await super(HatchetMCPServer, self).get_tools(run_context)
+
+            return {name: tool.tool_def for name, tool in tools.items()}
+
+        self.hatchet_wrapped_get_tools_task = wrapped_get_tools_task
+
+        @hatchet.task(
+            name=f'{self._name}.call_tool',
+            description=self._task_config.description,
+            input_validator=CallToolInput[AgentDepsT],
+            version=self._task_config.version,
+            sticky=self._task_config.sticky,
+            default_priority=self._task_config.default_priority,
+            concurrency=self._task_config.concurrency,
+            schedule_timeout=self._task_config.schedule_timeout,
+            execution_timeout=self._task_config.execution_timeout,
+            retries=self._task_config.retries,
+            rate_limits=self._task_config.rate_limits,
+            desired_worker_labels=self._task_config.desired_worker_labels,
+            backoff_factor=self._task_config.backoff_factor,
+            backoff_max_seconds=self._task_config.backoff_max_seconds,
+            default_filters=self._task_config.default_filters,
+        )
+        async def wrapped_call_tool_task(
+            input: CallToolInput[AgentDepsT],
+            _ctx: Context,
+        ) -> CallToolOutput[AgentDepsT]:
+            run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps)
+            tool = self.tool_for_tool_def(input.tool_def)
+
+            result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, tool)
+
+            return CallToolOutput[AgentDepsT](result=result)
+
+        self.hatchet_wrapped_call_tool_task = wrapped_call_tool_task
+
+    @property
+    def hatchet_tasks(self) -> list[Standalone[Any, Any]]:
+        """Return the list of Hatchet tasks for this toolset."""
+        return [
+            self.hatchet_wrapped_get_tools_task,
+            self.hatchet_wrapped_call_tool_task,
+        ]
+
+    def tool_for_tool_def(self, tool_def: ToolDefinition) -> ToolsetTool[AgentDepsT]:
+        # `MCPServer` is only imported under `TYPE_CHECKING` at module level, so import it at runtime here.
+        from pydantic_ai.mcp import MCPServer
+
+        assert isinstance(self.wrapped, MCPServer)
+        return self.wrapped.tool_for_tool_def(tool_def)
+
+    async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]:
+        serialized_run_context = self.run_context_type.serialize_run_context(ctx)
+        tool_defs = await self.hatchet_wrapped_get_tools_task.aio_run(
+            GetToolsInput(
+                serialized_run_context=serialized_run_context,
+                deps=ctx.deps,
+            )
+        )
+
+        return {name: self.tool_for_tool_def(tool_def) for name, tool_def in tool_defs.items()}
+
+    async def call_tool(
+        self,
+        name: str,
+        tool_args: dict[str, Any],
+        ctx: RunContext[AgentDepsT],
+        tool: ToolsetTool[AgentDepsT],
+    ) -> 'ToolResult':
+        serialized_run_context = self.run_context_type.serialize_run_context(ctx)
+
+        wrapped_tool_output = await self.hatchet_wrapped_call_tool_task.aio_run(
+            CallToolInput(
+                name=name,
+                tool_args=tool_args,
+                tool_def=tool.tool_def,
+                serialized_run_context=serialized_run_context,
+                deps=ctx.deps,
+            )
+        )
+
+        return wrapped_tool_output.result
diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py
new file mode 100644
index 0000000000..6875fdb5c9
--- /dev/null
+++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py
@@ -0,0 +1,216 @@
+from __future__ import annotations
+
+import json
+from collections.abc import AsyncIterator
+from 
contextlib import asynccontextmanager +from dataclasses import asdict +from datetime import datetime +from typing import Any + +from hatchet_sdk import Context, Hatchet +from pydantic import BaseModel, ConfigDict + +from pydantic_ai.agent import EventStreamHandler +from pydantic_ai.exceptions import UserError +from pydantic_ai.messages import ( + ModelMessage, + ModelResponse, + ModelResponseStreamEvent, +) +from pydantic_ai.models import Model, ModelRequestParameters, StreamedResponse +from pydantic_ai.models.wrapper import WrapperModel +from pydantic_ai.settings import ModelSettings +from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.usage import RequestUsage + +from ._run_context import HatchetRunContext +from ._utils import TaskConfig + + +class ModelInput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + messages: list[ModelMessage] + model_settings: ModelSettings | None + model_request_parameters: ModelRequestParameters + + +class ModelStreamInput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + messages: list[ModelMessage] + model_settings: ModelSettings | None + model_request_parameters: ModelRequestParameters + serialized_run_context: Any + deps_type_name: str + + +class HatchetModel(WrapperModel): + """A wrapper for Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" + + def __init__( + self, + model: Model, + *, + task_name_prefix: str, + task_config: TaskConfig, + hatchet: Hatchet, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + deps_type: type[AgentDepsT] | None = None, + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], + ): + super().__init__(model) + self.task_config = task_config + self.hatchet = hatchet + self._task_name_prefix = task_name_prefix + self.event_stream_handler = event_stream_handler + self.deps_type = deps_type + self.run_context_type = run_context_type + + @hatchet.task( + name=f'{self._task_name_prefix}__model__request', + description=self.task_config.description, + input_validator=ModelInput, + version=self.task_config.version, + sticky=self.task_config.sticky, + default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + desired_worker_labels=self.task_config.desired_worker_labels, + backoff_factor=self.task_config.backoff_factor, + backoff_max_seconds=self.task_config.backoff_max_seconds, + default_filters=self.task_config.default_filters, + ) + async def wrapped_request_task( + input: ModelInput, + _ctx: Context, + ) -> ModelResponse: + return await super(HatchetModel, self).request( + input.messages, input.model_settings, input.model_request_parameters + ) + + self.hatchet_wrapped_request_task = wrapped_request_task + + @hatchet.task( + name=f'{self._task_name_prefix}__model__request_stream', + description=self.task_config.description, + input_validator=ModelStreamInput, + version=self.task_config.version, + sticky=self.task_config.sticky, + default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + 
desired_worker_labels=self.task_config.desired_worker_labels,
+            backoff_factor=self.task_config.backoff_factor,
+            backoff_max_seconds=self.task_config.backoff_max_seconds,
+            default_filters=self.task_config.default_filters,
+        )
+        async def wrapped_request_stream_task(
+            input: ModelStreamInput,
+            ctx: Context,
+        ) -> ModelResponse:
+            assert self.event_stream_handler
+
+            # `ModelStreamInput` does not carry `deps`, so only the serialized run context fields can be restored here.
+            run_context = self.run_context_type.deserialize_run_context(
+                input.serialized_run_context,
+                deps=None,
+            )
+
+            async with self.wrapped.request_stream(
+                input.messages, input.model_settings, input.model_request_parameters, run_context
+            ) as streamed_response:
+                async for s in streamed_response:
+                    serialized = json.dumps(asdict(s), default=str)
+
+                    await ctx.aio_put_stream(serialized)
+
+            return streamed_response.get()
+
+        self.hatchet_wrapped_request_stream_task = wrapped_request_stream_task
+
+    async def request(
+        self,
+        messages: list[ModelMessage],
+        model_settings: ModelSettings | None,
+        model_request_parameters: ModelRequestParameters,
+    ) -> ModelResponse:
+        return await self.hatchet_wrapped_request_task.aio_run(
+            ModelInput(
+                messages=messages,
+                model_settings=model_settings,
+                model_request_parameters=model_request_parameters,
+            )
+        )
+
+    @asynccontextmanager
+    async def request_stream(
+        self,
+        messages: list[ModelMessage],
+        model_settings: ModelSettings | None,
+        model_request_parameters: ModelRequestParameters,
+        run_context: RunContext[Any] | None = None,
+    ):
+        if self.hatchet.is_in_task_run:
+            async with super().request_stream(
+                messages, model_settings, model_request_parameters, run_context
+            ) as streamed_response:
+                yield streamed_response
+            return
+
+        if run_context is None:
+            raise UserError(
+                'A Hatchet model cannot be used with `pydantic_ai.direct.model_request_stream()` as it requires a `run_context`. Set an `event_stream_handler` on the agent and use `agent.run()` instead.'
+ ) + + assert self.event_stream_handler is not None + + res = await self.hatchet_wrapped_request_stream_task.aio_run( + input=ModelStreamInput( + messages=messages, + model_settings=model_settings, + model_request_parameters=model_request_parameters, + serialized_run_context=self.run_context_type.serialize_run_context(run_context), + deps_type_name=self.deps_type.__name__ if self.deps_type else '', + ) + ) + + yield HatchetStreamedResponse( + model_request_parameters=model_request_parameters, + response=res, + ) + + +class HatchetStreamedResponse(StreamedResponse): + def __init__(self, model_request_parameters: ModelRequestParameters, response: ModelResponse): + super().__init__(model_request_parameters) + self.response = response + + async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: + return + # noinspection PyUnreachableCode + yield + + def get(self) -> ModelResponse: + return self.response + + def usage(self) -> RequestUsage: + return self.response.usage # pragma: no cover + + @property + def model_name(self) -> str: + return self.response.model_name or '' # pragma: no cover + + @property + def provider_name(self) -> str: + return self.response.provider_name or '' # pragma: no cover + + @property + def timestamp(self) -> datetime: + return self.response.timestamp # pragma: no cover diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py new file mode 100644 index 0000000000..5bb8517544 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from typing import Any + +from pydantic import BaseModel, Field + +from pydantic_ai.exceptions import UserError +from pydantic_ai.tools import AgentDepsT, RunContext + + +class SerializedHatchetRunContext(BaseModel): + retries: dict[str, int] = Field(default_factory=dict) + tool_call_id: str | None = None + tool_name: str | None = None + tool_call_approved: bool = False + retry: int = 0 + run_step: int = 0 + + +class HatchetRunContext(RunContext[AgentDepsT]): + """The [`RunContext`][pydantic_ai.tools.RunContext] subclass to use to serialize and deserialize the run context for use inside a Hatchet task. + + By default, only the `deps`, `retries`, `tool_call_id`, `tool_name`, `tool_call_approved`, `retry` and `run_step` attributes will be available. + To make another attribute available, create a `HatchetRunContext` subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to [`HatchetAgent`][pydantic_ai.durable_exec.hatchet.HatchetAgent]. + """ + + def __init__(self, deps: AgentDepsT, **kwargs: Any): + self.__dict__ = {**kwargs, 'deps': deps} + setattr( + self, + '__dataclass_fields__', + {name: field for name, field in RunContext.__dataclass_fields__.items() if name in self.__dict__}, + ) + + def __getattribute__(self, name: str) -> Any: + try: + return super().__getattribute__(name) + except AttributeError as e: # pragma: no cover + if name in RunContext.__dataclass_fields__: + raise UserError( + f'{self.__class__.__name__!r} object has no attribute {name!r}. ' + 'To make the attribute available, create a `HatchetRunContext` subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to `HatchetAgent`.' 
+ ) + else: + raise e + + @classmethod + def serialize_run_context(cls, ctx: RunContext[Any]) -> SerializedHatchetRunContext: + """Serialize the run context to a `SerializedHatchetRunContext`.""" + return SerializedHatchetRunContext( + retries=ctx.retries, + tool_call_id=ctx.tool_call_id, + tool_name=ctx.tool_name, + tool_call_approved=ctx.tool_call_approved, + retry=ctx.retry, + run_step=ctx.run_step, + ) + + @classmethod + def deserialize_run_context( + cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT + ) -> HatchetRunContext[AgentDepsT]: + """Deserialize the run context from a `SerializedHatchetRunContext`.""" + return cls( + deps=deps, + **ctx.model_dump(), + ) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py new file mode 100644 index 0000000000..8b0c8d5e07 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Callable +from typing import Any + +from hatchet_sdk import Hatchet +from hatchet_sdk.runnables.workflow import Standalone + +from pydantic_ai.tools import AgentDepsT +from pydantic_ai.toolsets.abstract import AbstractToolset +from pydantic_ai.toolsets.function import FunctionToolset +from pydantic_ai.toolsets.wrapper import WrapperToolset + +from ._run_context import HatchetRunContext +from ._utils import TaskConfig + + +class HatchetWrapperToolset(WrapperToolset[AgentDepsT], ABC): + @property + def id(self) -> str: + assert self.wrapped.id is not None + return self.wrapped.id + + @property + @abstractmethod + def hatchet_tasks(self) -> list[Standalone[Any, Any]]: + """Return the list of Hatchet tasks for this toolset.""" + raise NotImplementedError + + def visit_and_replace( + self, visitor: Callable[[AbstractToolset[AgentDepsT]], AbstractToolset[AgentDepsT]] + ) -> AbstractToolset[AgentDepsT]: + return self + + +def hatchetize_toolset( + toolset: AbstractToolset[AgentDepsT], + hatchet: Hatchet, + task_name_prefix: str, + task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], +) -> AbstractToolset[AgentDepsT]: + """Hatchetize a toolset. + + Args: + toolset: The toolset to hatchetize. + hatchet: The Hatchet instance to use for creating tasks. + task_name_prefix: Prefix for Hatchet task names. + task_config: The Hatchet task config to use. + deps_type: The type of agent's dependencies object. It needs to be serializable using Pydantic's `TypeAdapter`. + run_context_type: The `HatchetRunContext` (sub)class that's used to serialize and deserialize the run context. 
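+
+    Example (illustrative sketch; `hatchet`, `toolset`, and `MyDeps` are assumed to already exist in the calling code):
+
+        ```python
+        hatchet_toolset = hatchetize_toolset(
+            toolset,
+            hatchet=hatchet,
+            task_name_prefix='my_agent',
+            task_config=TaskConfig(retries=2),
+            deps_type=MyDeps,
+        )
+        ```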
+ """ + if isinstance(toolset, FunctionToolset): + from ._function_toolset import HatchetFunctionToolset + + return HatchetFunctionToolset( + toolset, + hatchet=hatchet, + task_name_prefix=task_name_prefix, + task_config=task_config, + deps_type=deps_type, + run_context_type=run_context_type, + ) + + try: + from pydantic_ai.mcp import MCPServer + + from ._mcp_server import HatchetMCPServer + except ImportError: + pass + else: + if isinstance(toolset, MCPServer): + return HatchetMCPServer( + toolset, + hatchet=hatchet, + task_name_prefix=task_name_prefix, + task_config=task_config, + deps_type=deps_type, + run_context_type=run_context_type, + ) + + return toolset diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py new file mode 100644 index 0000000000..8a459b6eba --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +from datetime import timedelta + +from hatchet_sdk import ConcurrencyExpression, DefaultFilter, StickyStrategy +from hatchet_sdk.labels import DesiredWorkerLabel +from hatchet_sdk.rate_limit import RateLimit +from hatchet_sdk.runnables.types import Duration +from pydantic import BaseModel + + +class TaskConfig(BaseModel): + description: str | None = None + version: str | None = None + sticky: StickyStrategy | None = None + default_priority: int = 1 + concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None + schedule_timeout: Duration = timedelta(minutes=5) + execution_timeout: Duration = timedelta(seconds=60) + retries: int = 0 + rate_limits: list[RateLimit] | None = None + desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None + backoff_factor: float | None = None + backoff_max_seconds: int | None = None + default_filters: list[DefaultFilter] | None = None diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index de6e164d4b..35e34792c0 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -100,6 +100,8 @@ retries = ["tenacity>=8.2.3"] temporal = ["temporalio==1.18.0"] # DBOS dbos = ["dbos>=1.14.0"] +# Hatchet +hatchet = ["hatchet-sdk @ git+https://github.com/hatchet-dev/hatchet.git@mk/tweaks-for-pydantic-ai#subdirectory=sdks/python"] [tool.hatch.metadata] allow-direct-references = true diff --git a/pyproject.toml b/pyproject.toml index 7aeff9a59f..c0e236fabb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ dependencies = [ examples = ["pydantic-ai-examples=={{ version }}"] a2a = ["fasta2a>=0.4.1"] dbos = ["pydantic-ai-slim[dbos]=={{ version }}"] +hatchet = ["pydantic-ai-slim[hatchet]=={{ version }}"] [project.urls] Homepage = "https://ai.pydantic.dev" diff --git a/uv.lock b/uv.lock index 270552e04d..ddd19ab389 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1400,6 +1400,120 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/11/1019a6cfdb2e520cb461cf70d859216be8ca122ddf5ad301fc3b0ee45fd4/groq-0.25.0-py3-none-any.whl", hash = "sha256:aadc78b40b1809cdb196b1aa8c7f7293108767df1508cafa3e0d5045d9328e7a", size = 129371, upload-time = "2025-05-16T19:57:41.786Z" }, ] +[[package]] +name = "grpcio" +version = "1.75.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/57/89fd829fb00a6d0bee3fbcb2c8a7aa0252d908949b6ab58bfae99d39d77e/grpcio-1.75.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:1712b5890b22547dd29f3215c5788d8fc759ce6dd0b85a6ba6e2731f2d04c088", size = 5705534, upload-time = "2025-09-26T09:00:52.225Z" }, + { url = "https://files.pythonhosted.org/packages/76/dd/2f8536e092551cf804e96bcda79ecfbc51560b214a0f5b7ebc253f0d4664/grpcio-1.75.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d04e101bba4b55cea9954e4aa71c24153ba6182481b487ff376da28d4ba46cf", size = 11484103, upload-time = "2025-09-26T09:00:59.457Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3d/affe2fb897804c98d56361138e73786af8f4dd876b9d9851cfe6342b53c8/grpcio-1.75.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:683cfc70be0c1383449097cba637317e4737a357cfc185d887fd984206380403", size = 6289953, upload-time = "2025-09-26T09:01:03.699Z" }, + { url = "https://files.pythonhosted.org/packages/87/aa/0f40b7f47a0ff10d7e482bc3af22dac767c7ff27205915f08962d5ca87a2/grpcio-1.75.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:491444c081a54dcd5e6ada57314321ae526377f498d4aa09d975c3241c5b9e1c", size = 6949785, upload-time = "2025-09-26T09:01:07.504Z" }, + { url = "https://files.pythonhosted.org/packages/a5/45/b04407e44050781821c84f26df71b3f7bc469923f92f9f8bc27f1406dbcc/grpcio-1.75.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce08d4e112d0d38487c2b631ec8723deac9bc404e9c7b1011426af50a79999e4", size = 6465708, upload-time = "2025-09-26T09:01:11.028Z" }, + { url = "https://files.pythonhosted.org/packages/09/3e/4ae3ec0a4d20dcaafbb6e597defcde06399ccdc5b342f607323f3b47f0a3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5a2acda37fc926ccc4547977ac3e56b1df48fe200de968e8c8421f6e3093df6c", size = 7100912, upload-time = "2025-09-26T09:01:14.393Z" }, + { url = "https://files.pythonhosted.org/packages/34/3f/a9085dab5c313bb0cb853f222d095e2477b9b8490a03634cdd8d19daa5c3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:745c5fe6bf05df6a04bf2d11552c7d867a2690759e7ab6b05c318a772739bd75", size = 8042497, upload-time = "2025-09-26T09:01:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/ea54eba931ab9ed3f999ba95f5d8d01a20221b664725bab2fe93e3dee848/grpcio-1.75.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:259526a7159d39e2db40d566fe3e8f8e034d0fb2db5bf9c00e09aace655a4c2b", size = 7493284, upload-time = "2025-09-26T09:01:20.896Z" }, + { url = "https://files.pythonhosted.org/packages/b7/5e/287f1bf1a998f4ac46ef45d518de3b5da08b4e86c7cb5e1108cee30b0282/grpcio-1.75.1-cp310-cp310-win32.whl", hash = "sha256:f4b29b9aabe33fed5df0a85e5f13b09ff25e2c05bd5946d25270a8bd5682dac9", size = 3950809, upload-time = "2025-09-26T09:01:23.695Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/3cbfc06a4ec160dc77403b29ecb5cf76ae329eb63204fea6a7c715f1dfdb/grpcio-1.75.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf2e760978dcce7ff7d465cbc7e276c3157eedc4c27aa6de7b594c7a295d3d61", size = 4644704, upload-time = "2025-09-26T09:01:25.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, + { url = "https://files.pythonhosted.org/packages/46/74/bac4ab9f7722164afdf263ae31ba97b8174c667153510322a5eba4194c32/grpcio-1.75.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:3bed22e750d91d53d9e31e0af35a7b0b51367e974e14a4ff229db5b207647884", size = 5672779, upload-time = "2025-09-26T09:02:19.11Z" }, + { url = "https://files.pythonhosted.org/packages/a6/52/d0483cfa667cddaa294e3ab88fd2c2a6e9dc1a1928c0e5911e2e54bd5b50/grpcio-1.75.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5b8f381eadcd6ecaa143a21e9e80a26424c76a0a9b3d546febe6648f3a36a5ac", size = 11470623, upload-time = "2025-09-26T09:02:22.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/e4/d1954dce2972e32384db6a30273275e8c8ea5a44b80347f9055589333b3f/grpcio-1.75.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5bf4001d3293e3414d0cf99ff9b1139106e57c3a66dfff0c5f60b2a6286ec133", size = 6248838, upload-time = "2025-09-26T09:02:26.426Z" }, + { url = "https://files.pythonhosted.org/packages/06/43/073363bf63826ba8077c335d797a8d026f129dc0912b69c42feaf8f0cd26/grpcio-1.75.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f82ff474103e26351dacfe8d50214e7c9322960d8d07ba7fa1d05ff981c8b2d", size = 6922663, upload-time = "2025-09-26T09:02:28.724Z" }, + { url = "https://files.pythonhosted.org/packages/c2/6f/076ac0df6c359117676cacfa8a377e2abcecec6a6599a15a672d331f6680/grpcio-1.75.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ee119f4f88d9f75414217823d21d75bfe0e6ed40135b0cbbfc6376bc9f7757d", size = 6436149, upload-time = "2025-09-26T09:02:30.971Z" }, + { url = "https://files.pythonhosted.org/packages/6b/27/1d08824f1d573fcb1fa35ede40d6020e68a04391709939e1c6f4193b445f/grpcio-1.75.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:664eecc3abe6d916fa6cf8dd6b778e62fb264a70f3430a3180995bf2da935446", size = 7067989, upload-time = "2025-09-26T09:02:33.233Z" }, + { url = "https://files.pythonhosted.org/packages/c6/98/98594cf97b8713feb06a8cb04eeef60b4757e3e2fb91aa0d9161da769843/grpcio-1.75.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c32193fa08b2fbebf08fe08e84f8a0aad32d87c3ad42999c65e9449871b1c66e", size = 8010717, upload-time = "2025-09-26T09:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7e/bb80b1bba03c12158f9254762cdf5cced4a9bc2e8ed51ed335915a5a06ef/grpcio-1.75.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5cebe13088b9254f6e615bcf1da9131d46cfa4e88039454aca9cb65f639bd3bc", size = 7463822, upload-time = "2025-09-26T09:02:38.26Z" }, + { url = "https://files.pythonhosted.org/packages/23/1c/1ea57fdc06927eb5640f6750c697f596f26183573069189eeaf6ef86ba2d/grpcio-1.75.1-cp313-cp313-win32.whl", hash = "sha256:4b4c678e7ed50f8ae8b8dbad15a865ee73ce12668b6aaf411bf3258b5bc3f970", size = 3938490, upload-time = "2025-09-26T09:02:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/4b/24/fbb8ff1ccadfbf78ad2401c41aceaf02b0d782c084530d8871ddd69a2d49/grpcio-1.75.1-cp313-cp313-win_amd64.whl", hash = "sha256:5573f51e3f296a1bcf71e7a690c092845fb223072120f4bdb7a5b48e111def66", size = 4642538, upload-time = "2025-09-26T09:02:42.519Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1b/9a0a5cecd24302b9fdbcd55d15ed6267e5f3d5b898ff9ac8cbe17ee76129/grpcio-1.75.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:c05da79068dd96723793bffc8d0e64c45f316248417515f28d22204d9dae51c7", size = 5673319, upload-time = "2025-09-26T09:02:44.742Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ec/9d6959429a83fbf5df8549c591a8a52bb313976f6646b79852c4884e3225/grpcio-1.75.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06373a94fd16ec287116a825161dca179a0402d0c60674ceeec8c9fba344fe66", size = 11480347, upload-time = "2025-09-26T09:02:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/09/7a/26da709e42c4565c3d7bf999a9569da96243ce34a8271a968dee810a7cf1/grpcio-1.75.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4484f4b7287bdaa7a5b3980f3c7224c3c622669405d20f69549f5fb956ad0421", size = 6254706, upload-time = "2025-09-26T09:02:50.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/08/dcb26a319d3725f199c97e671d904d84ee5680de57d74c566a991cfab632/grpcio-1.75.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2720c239c1180eee69f7883c1d4c83fc1a495a2535b5fa322887c70bf02b16e8", size = 6922501, upload-time = "2025-09-26T09:02:52.711Z" }, + { url = "https://files.pythonhosted.org/packages/78/66/044d412c98408a5e23cb348845979a2d17a2e2b6c3c34c1ec91b920f49d0/grpcio-1.75.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:07a554fa31c668cf0e7a188678ceeca3cb8fead29bbe455352e712ec33ca701c", size = 6437492, upload-time = "2025-09-26T09:02:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9d/5e3e362815152aa1afd8b26ea613effa005962f9da0eec6e0e4527e7a7d1/grpcio-1.75.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3e71a2105210366bfc398eef7f57a664df99194f3520edb88b9c3a7e46ee0d64", size = 7081061, upload-time = "2025-09-26T09:02:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1a/46615682a19e100f46e31ddba9ebc297c5a5ab9ddb47b35443ffadb8776c/grpcio-1.75.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8679aa8a5b67976776d3c6b0521e99d1c34db8a312a12bcfd78a7085cb9b604e", size = 8010849, upload-time = "2025-09-26T09:03:00.548Z" }, + { url = "https://files.pythonhosted.org/packages/67/8e/3204b94ac30b0f675ab1c06540ab5578660dc8b690db71854d3116f20d00/grpcio-1.75.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:aad1c774f4ebf0696a7f148a56d39a3432550612597331792528895258966dc0", size = 7464478, upload-time = "2025-09-26T09:03:03.096Z" }, + { url = "https://files.pythonhosted.org/packages/b7/97/2d90652b213863b2cf466d9c1260ca7e7b67a16780431b3eb1d0420e3d5b/grpcio-1.75.1-cp314-cp314-win32.whl", hash = "sha256:62ce42d9994446b307649cb2a23335fa8e927f7ab2cbf5fcb844d6acb4d85f9c", size = 4012672, upload-time = "2025-09-26T09:03:05.477Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/e2e6e9fc1c985cd1a59e6996a05647c720fe8a03b92f5ec2d60d366c531e/grpcio-1.75.1-cp314-cp314-win_amd64.whl", hash = "sha256:f86e92275710bea3000cb79feca1762dc0ad3b27830dd1a74e82ab321d4ee464", size = 4772475, upload-time = "2025-09-26T09:03:07.661Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.71.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/ad/e74a4d1cffff628c2ef1ec5b9944fb098207cc4af6eb8db4bc52e6d99236/grpcio_tools-1.71.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:ab8a28c2e795520d6dc6ffd7efaef4565026dbf9b4f5270de2f3dd1ce61d2318", size = 2385557, upload-time = "2025-06-28T04:20:38.833Z" }, + { url = "https://files.pythonhosted.org/packages/63/bf/30b63418279d6fdc4fd4a3781a7976c40c7e8ee052333b9ce6bd4ce63f30/grpcio_tools-1.71.2-cp310-cp310-macosx_10_14_universal2.whl", hash = "sha256:654ecb284a592d39a85556098b8c5125163435472a20ead79b805cf91814b99e", size = 5446915, upload-time = "2025-06-28T04:20:40.947Z" }, + { url = "https://files.pythonhosted.org/packages/83/cd/2994e0a0a67714fdb00c207c4bec60b9b356fbd6b0b7a162ecaabe925155/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:b49aded2b6c890ff690d960e4399a336c652315c6342232c27bd601b3705739e", size = 2348301, upload-time = "2025-06-28T04:20:42.766Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8b/4f2315927af306af1b35793b332b9ca9dc5b5a2cde2d55811c9577b5f03f/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7811a6fc1c4b4e5438e5eb98dbd52c2dc4a69d1009001c13356e6636322d41a", size = 2742159, upload-time = "2025-06-28T04:20:44.206Z" }, + { url = "https://files.pythonhosted.org/packages/8d/98/d513f6c09df405c82583e7083c20718ea615ed0da69ec42c80ceae7ebdc5/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393a9c80596aa2b3f05af854e23336ea8c295593bbb35d9adae3d8d7943672bd", size = 2473444, upload-time = "2025-06-28T04:20:45.5Z" }, + { url = "https://files.pythonhosted.org/packages/fa/fe/00af17cc841916d5e4227f11036bf443ce006629212c876937c7904b0ba3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:823e1f23c12da00f318404c4a834bb77cd150d14387dee9789ec21b335249e46", size = 2850339, upload-time = "2025-06-28T04:20:46.758Z" }, + { url = "https://files.pythonhosted.org/packages/7d/59/745fc50dfdbed875fcfd6433883270d39d23fb1aa4ecc9587786f772dce3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9bfbea79d6aec60f2587133ba766ede3dc3e229641d1a1e61d790d742a3d19eb", size = 3300795, upload-time = "2025-06-28T04:20:48.327Z" }, + { url = "https://files.pythonhosted.org/packages/62/3e/d9d0fb2df78e601c28d02ef0cd5d007f113c1b04fc21e72bf56e8c3df66b/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:32f3a67b10728835b5ffb63fbdbe696d00e19a27561b9cf5153e72dbb93021ba", size = 2913729, upload-time = "2025-06-28T04:20:49.641Z" }, + { url = "https://files.pythonhosted.org/packages/09/ae/ddb264b4a10c6c10336a7c177f8738b230c2c473d0c91dd5d8ce8ea1b857/grpcio_tools-1.71.2-cp310-cp310-win32.whl", hash = "sha256:7fcf9d92c710bfc93a1c0115f25e7d49a65032ff662b38b2f704668ce0a938df", size = 945997, upload-time = "2025-06-28T04:20:50.9Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8d/5efd93698fe359f63719d934ebb2d9337e82d396e13d6bf00f4b06793e37/grpcio_tools-1.71.2-cp310-cp310-win_amd64.whl", hash = "sha256:914b4275be810290266e62349f2d020bb7cc6ecf9edb81da3c5cddb61a95721b", size = 1117474, upload-time = "2025-06-28T04:20:52.54Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/0568d38b8da6237ea8ea15abb960fb7ab83eb7bb51e0ea5926dab3d865b1/grpcio_tools-1.71.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:0acb8151ea866be5b35233877fbee6445c36644c0aa77e230c9d1b46bf34b18b", size = 2385557, upload-time = "2025-06-28T04:20:54.323Z" }, + { url = "https://files.pythonhosted.org/packages/76/fb/700d46f72b0f636cf0e625f3c18a4f74543ff127471377e49a071f64f1e7/grpcio_tools-1.71.2-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:b28f8606f4123edb4e6da281547465d6e449e89f0c943c376d1732dc65e6d8b3", size = 5447590, upload-time = "2025-06-28T04:20:55.836Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/d9bb2aec3de305162b23c5c884b9f79b1a195d42b1e6dabcc084cc9d0804/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:cbae6f849ad2d1f5e26cd55448b9828e678cb947fa32c8729d01998238266a6a", size = 2348495, upload-time = "2025-06-28T04:20:57.33Z" }, + { url = "https://files.pythonhosted.org/packages/d5/83/f840aba1690461b65330efbca96170893ee02fae66651bcc75f28b33a46c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:e4d1027615cfb1e9b1f31f2f384251c847d68c2f3e025697e5f5c72e26ed1316", size = 2742333, upload-time = "2025-06-28T04:20:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/30/34/c02cd9b37de26045190ba665ee6ab8597d47f033d098968f812d253bbf8c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bac95662dc69338edb9eb727cc3dd92342131b84b12b3e8ec6abe973d4cbf1b", size = 2473490, upload-time = "2025-06-28T04:21:00.614Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c7/375718ae091c8f5776828ce97bdcb014ca26244296f8b7f70af1a803ed2f/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c50250c7248055040f89eb29ecad39d3a260a4b6d3696af1575945f7a8d5dcdc", size = 2850333, upload-time = "2025-06-28T04:21:01.95Z" }, + { url = "https://files.pythonhosted.org/packages/19/37/efc69345bd92a73b2bc80f4f9e53d42dfdc234b2491ae58c87da20ca0ea5/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6ab1ad955e69027ef12ace4d700c5fc36341bdc2f420e87881e9d6d02af3d7b8", size = 3300748, upload-time = "2025-06-28T04:21:03.451Z" }, + { url = "https://files.pythonhosted.org/packages/d2/1f/15f787eb25ae42086f55ed3e4260e85f385921c788debf0f7583b34446e3/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd75dde575781262b6b96cc6d0b2ac6002b2f50882bf5e06713f1bf364ee6e09", size = 2913178, upload-time = "2025-06-28T04:21:04.879Z" }, + { url = "https://files.pythonhosted.org/packages/12/aa/69cb3a9dff7d143a05e4021c3c9b5cde07aacb8eb1c892b7c5b9fb4973e3/grpcio_tools-1.71.2-cp311-cp311-win32.whl", hash = "sha256:9a3cb244d2bfe0d187f858c5408d17cb0e76ca60ec9a274c8fd94cc81457c7fc", size = 946256, upload-time = "2025-06-28T04:21:06.518Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/fb951c5c87eadb507a832243942e56e67d50d7667b0e5324616ffd51b845/grpcio_tools-1.71.2-cp311-cp311-win_amd64.whl", hash = "sha256:00eb909997fd359a39b789342b476cbe291f4dd9c01ae9887a474f35972a257e", size = 1117661, upload-time = "2025-06-28T04:21:08.18Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" }, + { url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" }, + { url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8a/e4c1c4cb8c9ff7f50b7b2bba94abe8d1e98ea05f52a5db476e7f1c1a3c70/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a28eda8137d587eb30081384c256f5e5de7feda34776f89848b846da64e4be35", size = 2743321, upload-time = "2025-06-28T04:21:15.007Z" }, + { url = "https://files.pythonhosted.org/packages/fd/aa/95bc77fda5c2d56fb4a318c1b22bdba8914d5d84602525c99047114de531/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b19c083198f5eb15cc69c0a2f2c415540cbc636bfe76cea268e5894f34023b40", size = 2474005, upload-time = "2025-06-28T04:21:16.443Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ff/ca11f930fe1daa799ee0ce1ac9630d58a3a3deed3dd2f465edb9a32f299d/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:784c284acda0d925052be19053d35afbf78300f4d025836d424cf632404f676a", size = 2851559, upload-time = "2025-06-28T04:21:18.139Z" }, + { url = "https://files.pythonhosted.org/packages/64/10/c6fc97914c7e19c9bb061722e55052fa3f575165da9f6510e2038d6e8643/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:381e684d29a5d052194e095546eef067201f5af30fd99b07b5d94766f44bf1ae", size = 3300622, upload-time = "2025-06-28T04:21:20.291Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d6/965f36cfc367c276799b730d5dd1311b90a54a33726e561393b808339b04/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3e4b4801fabd0427fc61d50d09588a01b1cfab0ec5e8a5f5d515fbdd0891fd11", size = 2913863, upload-time = "2025-06-28T04:21:22.196Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f0/c05d5c3d0c1d79ac87df964e9d36f1e3a77b60d948af65bec35d3e5c75a3/grpcio_tools-1.71.2-cp312-cp312-win32.whl", hash = "sha256:84ad86332c44572305138eafa4cc30040c9a5e81826993eae8227863b700b490", size = 945744, upload-time = "2025-06-28T04:21:23.463Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e9/c84c1078f0b7af7d8a40f5214a9bdd8d2a567ad6c09975e6e2613a08d29d/grpcio_tools-1.71.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e1108d37eecc73b1c4a27350a6ed921b5dda25091700c1da17cfe30761cd462", size = 1117695, upload-time = "2025-06-28T04:21:25.22Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/bdf9c5055a1ad0a09123402d73ecad3629f75b9cf97828d547173b328891/grpcio_tools-1.71.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:b0f0a8611614949c906e25c225e3360551b488d10a366c96d89856bcef09f729", size = 2384758, upload-time = "2025-06-28T04:21:26.712Z" }, + { url = "https://files.pythonhosted.org/packages/49/d0/6aaee4940a8fb8269c13719f56d69c8d39569bee272924086aef81616d4a/grpcio_tools-1.71.2-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:7931783ea7ac42ac57f94c5047d00a504f72fbd96118bf7df911bb0e0435fc0f", size = 5443127, upload-time = "2025-06-28T04:21:28.383Z" }, + { url = "https://files.pythonhosted.org/packages/d9/11/50a471dcf301b89c0ed5ab92c533baced5bd8f796abfd133bbfadf6b60e5/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:d188dc28e069aa96bb48cb11b1338e47ebdf2e2306afa58a8162cc210172d7a8", size = 2349627, upload-time = "2025-06-28T04:21:30.254Z" }, + { url = "https://files.pythonhosted.org/packages/bb/66/e3dc58362a9c4c2fbe98a7ceb7e252385777ebb2bbc7f42d5ab138d07ace/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f36c4b3cc42ad6ef67430639174aaf4a862d236c03c4552c4521501422bfaa26", size = 2742932, upload-time = "2025-06-28T04:21:32.325Z" }, + { url = "https://files.pythonhosted.org/packages/b7/1e/1e07a07ed8651a2aa9f56095411198385a04a628beba796f36d98a5a03ec/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bd9ed12ce93b310f0cef304176049d0bc3b9f825e9c8c6a23e35867fed6affd", size = 2473627, upload-time = "2025-06-28T04:21:33.752Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f9/3b7b32e4acb419f3a0b4d381bc114fe6cd48e3b778e81273fc9e4748caad/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ce27e76dd61011182d39abca38bae55d8a277e9b7fe30f6d5466255baccb579", size = 2850879, upload-time = "2025-06-28T04:21:35.241Z" }, + { url = "https://files.pythonhosted.org/packages/1e/99/cd9e1acd84315ce05ad1fcdfabf73b7df43807cf00c3b781db372d92b899/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:dcc17bf59b85c3676818f2219deacac0156492f32ca165e048427d2d3e6e1157", size = 3300216, upload-time = "2025-06-28T04:21:36.826Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c0/66eab57b14550c5b22404dbf60635c9e33efa003bd747211981a9859b94b/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:706360c71bdd722682927a1fb517c276ccb816f1e30cb71f33553e5817dc4031", size = 2913521, upload-time = "2025-06-28T04:21:38.347Z" }, + { url = "https://files.pythonhosted.org/packages/05/9b/7c90af8f937d77005625d705ab1160bc42a7e7b021ee5c788192763bccd6/grpcio_tools-1.71.2-cp313-cp313-win32.whl", hash = "sha256:bcf751d5a81c918c26adb2d6abcef71035c77d6eb9dd16afaf176ee096e22c1d", size = 945322, upload-time = "2025-06-28T04:21:39.864Z" }, + { url = "https://files.pythonhosted.org/packages/5f/80/6db6247f767c94fe551761772f89ceea355ff295fd4574cb8efc8b2d1199/grpcio_tools-1.71.2-cp313-cp313-win_amd64.whl", hash = "sha256:b1581a1133552aba96a730178bc44f6f1a071f0eb81c5b6bc4c0f89f5314e2b8", size = 1117234, upload-time = "2025-06-28T04:21:41.893Z" }, +] + [[package]] name = "grpclib" version = "0.4.7" @@ -1432,6 +1546,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, ] +[[package]] +name = "hatchet-sdk" +version = "1.19.1" +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#ed3a64aa36f0e4c15da148023894422ed5b1eccd" } +dependencies = [ + { name = "aiohttp" }, + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "prometheus-client" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-dateutil" }, + { name = "tenacity" }, + { name = "urllib3" }, +] + [[package]] name = "hf-xet" version = "1.1.3" @@ -2839,6 +2970,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/dd/f0183ed0145e58cf9d286c1b2c14f63ccee987a4ff79ac85acc31b5d86bd/primp-0.15.0-cp38-abi3-win_amd64.whl", hash = "sha256:aeb6bd20b06dfc92cfe4436939c18de88a58c640752cf7f30d9e4ae893cdec32", size = 3149967, upload-time = "2025-04-17T11:41:07.067Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + [[package]] name = "prompt-toolkit" version = "3.0.50" @@ -2942,16 +3082,16 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.3" +version = "5.29.5" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945, upload-time = "2025-01-08T21:38:51.572Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708, upload-time = "2025-01-08T21:38:31.799Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/aae8e10512b83de633f2646506a6d835b151edf4b30d18d73afd01447253/protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a", size = 434508, upload-time = "2025-01-08T21:38:35.489Z" }, - { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825, upload-time = "2025-01-08T21:38:36.642Z" }, - { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573, upload-time = "2025-01-08T21:38:37.896Z" }, - { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672, upload-time = "2025-01-08T21:38:40.204Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550, upload-time = "2025-01-08T21:38:50.439Z" }, + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, ] [[package]] @@ -3128,6 +3268,9 @@ dbos = [ examples = [ { name = "pydantic-ai-examples" }, ] +hatchet = [ + { name = "pydantic-ai-slim", extra = ["hatchet"] }, +] [package.dev-dependencies] dev = [ @@ -3178,8 +3321,9 @@ requires-dist = [ { name = "pydantic-ai-examples", marker = "extra == 'examples'", editable = "examples" }, { name = "pydantic-ai-slim", extras = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "vertexai"], editable = "pydantic_ai_slim" }, { name = "pydantic-ai-slim", extras = ["dbos"], marker = "extra == 'dbos'", editable = "pydantic_ai_slim" }, + { name = "pydantic-ai-slim", extras = ["hatchet"], marker = "extra == 'hatchet'", editable = "pydantic_ai_slim" }, ] -provides-extras = ["a2a", "dbos", "examples"] +provides-extras = ["a2a", "dbos", "examples", "hatchet"] [package.metadata.requires-dev] dev = [ @@ -3316,6 +3460,9 @@ google = [ groq = [ { name = "groq" }, ] +hatchet = [ + { name = "hatchet-sdk" }, +] huggingface = [ { name = "huggingface-hub", extra = ["inference"] }, ] @@ -3361,6 +3508,7 @@ requires-dist = [ { name = "google-genai", marker = "extra == 'google'", specifier = ">=1.31.0" }, { name = "griffe", specifier = ">=1.3.2" }, { name = "groq", marker = "extra == 'groq'", specifier = ">=0.25.0" }, + { name = "hatchet-sdk", marker = "extra == 'hatchet'", git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai" }, { name = "httpx", specifier = ">=0.27" }, { name = "huggingface-hub", extras = ["inference"], marker = "extra == 'huggingface'", specifier = ">=0.33.5" }, { name = "logfire", extras = ["httpx"], marker = "extra == 'logfire'", specifier = ">=3.14.1" }, @@ -3381,7 +3529,7 @@ requires-dist = [ { name = "tenacity", marker = "extra == 'retries'", specifier = ">=8.2.3" }, { name = "typing-inspection", specifier = ">=0.4.0" }, ] -provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "tavily", "temporal", "vertexai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "hatchet", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "tavily", "temporal", "vertexai"] [[package]] name = "pydantic-core" @@ -4088,6 +4236,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552, upload-time = "2022-05-26T13:35:21.206Z" }, ] +[[package]] +name = 
"setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -4529,11 +4686,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.3.0" +version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]]