From 080d2bc64643fa2dce930a43f31fce85ea160aa1 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 11:53:28 -0400 Subject: [PATCH 01/42] chore: scaffold hatchet agent, model, and mcp server --- .../durable_exec/hatchet/__init__.py | 5 ++++ .../durable_exec/hatchet/_agent.py | 28 +++++++++++++++++++ .../durable_exec/hatchet/_mcp_server.py | 22 +++++++++++++++ .../durable_exec/hatchet/_model.py | 16 +++++++++++ 4 files changed, 71 insertions(+) create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py new file mode 100644 index 0000000000..f12d1cd92f --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/__init__.py @@ -0,0 +1,5 @@ +from ._agent import HatchetAgent +from ._mcp_server import HatchetMCPServer +from ._model import HatchetModel + +__all__ = ['HatchetAgent', 'HatchetModel', 'HatchetMCPServer'] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py new file mode 100644 index 0000000000..3b759a3217 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from pydantic_ai.agent import AbstractAgent, WrapperAgent +from pydantic_ai.output import OutputDataT +from pydantic_ai.tools import ( + AgentDepsT, +) + + +class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): + def __init__( + self, + wrapped: AbstractAgent[AgentDepsT, OutputDataT], + ): + """Wrap an agent to enable it with Hatchet durable workflows, by automatically offloading model requests, tool calls, and MCP server 
communication to Hatchet tasks. + + After wrapping, the original agent can still be used as normal outside of the Hatchet workflow. + + Args: + wrapped: The agent to wrap. + name: Optional unique agent name to use as the Hatchet configured instance name. If not provided, the agent's `name` will be used. + event_stream_handler: Optional event stream handler to use instead of the one set on the wrapped agent. + mcp_step_config: The base Hatchet task config to use for MCP server tasks. If no config is provided, use the default settings of Hatchet. + model_step_config: The Hatchet task config to use for model request tasks. If no config is provided, use the default settings of Hatchet. + """ + super().__init__(wrapped) + + pass diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py new file mode 100644 index 0000000000..8bcdeee8c2 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -0,0 +1,22 @@ +from __future__ import annotations + +from abc import ABC +from typing import TYPE_CHECKING + +from pydantic_ai.tools import AgentDepsT +from pydantic_ai.toolsets.wrapper import WrapperToolset + +if TYPE_CHECKING: + from pydantic_ai.mcp import MCPServer + + +class HatchetMCPServer(WrapperToolset[AgentDepsT], ABC): + """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" + + def __init__( + self, + wrapped: MCPServer, + ): + super().__init__(wrapped) + + pass diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py new file mode 100644 index 0000000000..4ed03d5474 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -0,0 +1,16 @@ +from __future__ import annotations + +from pydantic_ai.models import Model +from pydantic_ai.models.wrapper import WrapperModel + + +class HatchetModel(WrapperModel): + """A wrapper for 
Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" + + def __init__( + self, + model: Model, + ): + super().__init__(model) + + pass From 1aa689ceaabf013f8c6ae1c1fcebaf0450b42254 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 11:55:52 -0400 Subject: [PATCH 02/42] chore: add hatchet dep --- pydantic_ai_slim/pyproject.toml | 2 ++ pyproject.toml | 1 + 2 files changed, 3 insertions(+) diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index a6bdc48333..7dfb544256 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -100,6 +100,8 @@ retries = ["tenacity>=8.2.3"] temporal = ["temporalio==1.17.0"] # DBOS dbos = ["dbos>=1.13.0"] +# Hatchet +hatchet = ["hatchet-sdk>=1.18.1"] [tool.hatch.metadata] allow-direct-references = true diff --git a/pyproject.toml b/pyproject.toml index 6b647d78e8..50416daabc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ dependencies = [ examples = ["pydantic-ai-examples=={{ version }}"] a2a = ["fasta2a>=0.4.1"] dbos = ["pydantic-ai-slim[dbos]=={{ version }}"] +hatchet = ["pydantic-ai-slim[hatchet]=={{ version }}"] [project.urls] Homepage = "https://ai.pydantic.dev" From 5e8dedfee6e1ad99bf8a8f7f22f11728a56594b3 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 12:20:29 -0400 Subject: [PATCH 03/42] feat: add utils for task config --- .../durable_exec/hatchet/_utils.py | 29 +++ uv.lock | 171 +++++++++++++++++- 2 files changed, 190 insertions(+), 10 deletions(-) create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py new file mode 100644 index 0000000000..d34f683461 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from datetime import timedelta + +from hatchet_sdk 
import ConcurrencyExpression, DefaultFilter, StickyStrategy +from hatchet_sdk.labels import DesiredWorkerLabel +from hatchet_sdk.rate_limit import RateLimit +from hatchet_sdk.runnables.types import Duration +from pydantic import BaseModel + + +class TaskConfig(BaseModel): + name: str | None = None + description: str | None = None + input_validator: None = None + on_events: list[str] | None = None + on_crons: list[str] | None = None + version: str | None = None + sticky: StickyStrategy | None = None + default_priority: int = 1 + concurrency: ConcurrencyExpression | list[ConcurrencyExpression] | None = None + schedule_timeout: Duration = timedelta(minutes=5) + execution_timeout: Duration = timedelta(seconds=60) + retries: int = 0 + rate_limits: list[RateLimit] | None = None + desired_worker_labels: dict[str, DesiredWorkerLabel] | None = None + backoff_factor: float | None = None + backoff_max_seconds: int | None = None + default_filters: list[DefaultFilter] | None = None diff --git a/uv.lock b/uv.lock index f0ea07c7a7..2f053c74c3 100644 --- a/uv.lock +++ b/uv.lock @@ -1547,6 +1547,110 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/11/1019a6cfdb2e520cb461cf70d859216be8ca122ddf5ad301fc3b0ee45fd4/groq-0.25.0-py3-none-any.whl", hash = "sha256:aadc78b40b1809cdb196b1aa8c7f7293108767df1508cafa3e0d5045d9328e7a", size = 129371, upload-time = "2025-05-16T19:57:41.786Z" }, ] +[[package]] +name = "grpcio" +version = "1.75.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/88/fe2844eefd3d2188bc0d7a2768c6375b46dfd96469ea52d8aeee8587d7e0/grpcio-1.75.0.tar.gz", hash = "sha256:b989e8b09489478c2d19fecc744a298930f40d8b27c3638afbfe84d22f36ce4e", size = 12722485, upload-time = "2025-09-16T09:20:21.731Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/23/90/91f780f6cb8b2aa1bc8b8f8561a4e9d3bfe5dea10a4532843f2b044e18ac/grpcio-1.75.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:1ec9cbaec18d9597c718b1ed452e61748ac0b36ba350d558f9ded1a94cc15ec7", size = 5696373, upload-time = "2025-09-16T09:18:07.971Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c6/eaf9065ff15d0994e1674e71e1ca9542ee47f832b4df0fde1b35e5641fa1/grpcio-1.75.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7ee5ee42bfae8238b66a275f9ebcf6f295724375f2fa6f3b52188008b6380faf", size = 11465905, upload-time = "2025-09-16T09:18:12.383Z" }, + { url = "https://files.pythonhosted.org/packages/8a/21/ae33e514cb7c3f936b378d1c7aab6d8e986814b3489500c5cc860c48ce88/grpcio-1.75.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9146e40378f551eed66c887332afc807fcce593c43c698e21266a4227d4e20d2", size = 6282149, upload-time = "2025-09-16T09:18:15.427Z" }, + { url = "https://files.pythonhosted.org/packages/d5/46/dff6344e6f3e81707bc87bba796592036606aca04b6e9b79ceec51902b80/grpcio-1.75.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0c40f368541945bb664857ecd7400acb901053a1abbcf9f7896361b2cfa66798", size = 6940277, upload-time = "2025-09-16T09:18:17.564Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5f/e52cb2c16e097d950c36e7bb2ef46a3b2e4c7ae6b37acb57d88538182b85/grpcio-1.75.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:50a6e43a9adc6938e2a16c9d9f8a2da9dd557ddd9284b73b07bd03d0e098d1e9", size = 6460422, upload-time = "2025-09-16T09:18:19.657Z" }, + { url = "https://files.pythonhosted.org/packages/fd/16/527533f0bd9cace7cd800b7dae903e273cc987fc472a398a4bb6747fec9b/grpcio-1.75.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dce15597ca11913b78e1203c042d5723e3ea7f59e7095a1abd0621be0e05b895", size = 7089969, upload-time = "2025-09-16T09:18:21.73Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/4f/1d448820bc88a2be7045aac817a59ba06870e1ebad7ed19525af7ac079e7/grpcio-1.75.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:851194eec47755101962da423f575ea223c9dd7f487828fe5693920e8745227e", size = 8033548, upload-time = "2025-09-16T09:18:23.819Z" }, + { url = "https://files.pythonhosted.org/packages/37/00/19e87ab12c8b0d73a252eef48664030de198514a4e30bdf337fa58bcd4dd/grpcio-1.75.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ca123db0813eef80625a4242a0c37563cb30a3edddebe5ee65373854cf187215", size = 7487161, upload-time = "2025-09-16T09:18:25.934Z" }, + { url = "https://files.pythonhosted.org/packages/37/d0/f7b9deaa6ccca9997fa70b4e143cf976eaec9476ecf4d05f7440ac400635/grpcio-1.75.0-cp310-cp310-win32.whl", hash = "sha256:222b0851e20c04900c63f60153503e918b08a5a0fad8198401c0b1be13c6815b", size = 3946254, upload-time = "2025-09-16T09:18:28.42Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/8d04744c7dc720cc9805a27f879cbf7043bb5c78dce972f6afb8613860de/grpcio-1.75.0-cp310-cp310-win_amd64.whl", hash = "sha256:bb58e38a50baed9b21492c4b3f3263462e4e37270b7ea152fc10124b4bd1c318", size = 4640072, upload-time = "2025-09-16T09:18:30.426Z" }, + { url = "https://files.pythonhosted.org/packages/95/b7/a6f42596fc367656970f5811e5d2d9912ca937aa90621d5468a11680ef47/grpcio-1.75.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:7f89d6d0cd43170a80ebb4605cad54c7d462d21dc054f47688912e8bf08164af", size = 5699769, upload-time = "2025-09-16T09:18:32.536Z" }, + { url = "https://files.pythonhosted.org/packages/c2/42/284c463a311cd2c5f804fd4fdbd418805460bd5d702359148dd062c1685d/grpcio-1.75.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:cb6c5b075c2d092f81138646a755f0dad94e4622300ebef089f94e6308155d82", size = 11480362, upload-time = "2025-09-16T09:18:35.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/10/60d54d5a03062c3ae91bddb6e3acefe71264307a419885f453526d9203ff/grpcio-1.75.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:494dcbade5606128cb9f530ce00331a90ecf5e7c5b243d373aebdb18e503c346", size = 6284753, upload-time = "2025-09-16T09:18:38.055Z" }, + { url = "https://files.pythonhosted.org/packages/cf/af/381a4bfb04de5e2527819452583e694df075c7a931e9bf1b2a603b593ab2/grpcio-1.75.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:050760fd29c8508844a720f06c5827bb00de8f5e02f58587eb21a4444ad706e5", size = 6944103, upload-time = "2025-09-16T09:18:40.844Z" }, + { url = "https://files.pythonhosted.org/packages/16/18/c80dd7e1828bd6700ce242c1616871927eef933ed0c2cee5c636a880e47b/grpcio-1.75.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:266fa6209b68a537b2728bb2552f970e7e78c77fe43c6e9cbbe1f476e9e5c35f", size = 6464036, upload-time = "2025-09-16T09:18:43.351Z" }, + { url = "https://files.pythonhosted.org/packages/79/3f/78520c7ed9ccea16d402530bc87958bbeb48c42a2ec8032738a7864d38f8/grpcio-1.75.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:06d22e1d8645e37bc110f4c589cb22c283fd3de76523065f821d6e81de33f5d4", size = 7097455, upload-time = "2025-09-16T09:18:45.465Z" }, + { url = "https://files.pythonhosted.org/packages/ad/69/3cebe4901a865eb07aefc3ee03a02a632e152e9198dadf482a7faf926f31/grpcio-1.75.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9880c323595d851292785966cadb6c708100b34b163cab114e3933f5773cba2d", size = 8037203, upload-time = "2025-09-16T09:18:47.878Z" }, + { url = "https://files.pythonhosted.org/packages/04/ed/1e483d1eba5032642c10caf28acf07ca8de0508244648947764956db346a/grpcio-1.75.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:55a2d5ae79cd0f68783fb6ec95509be23746e3c239290b2ee69c69a38daa961a", size = 7492085, upload-time = "2025-09-16T09:18:50.907Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/65/6ef676aa7dbd9578dfca990bb44d41a49a1e36344ca7d79de6b59733ba96/grpcio-1.75.0-cp311-cp311-win32.whl", hash = "sha256:352dbdf25495eef584c8de809db280582093bc3961d95a9d78f0dfb7274023a2", size = 3944697, upload-time = "2025-09-16T09:18:53.427Z" }, + { url = "https://files.pythonhosted.org/packages/0d/83/b753373098b81ec5cb01f71c21dfd7aafb5eb48a1566d503e9fd3c1254fe/grpcio-1.75.0-cp311-cp311-win_amd64.whl", hash = "sha256:678b649171f229fb16bda1a2473e820330aa3002500c4f9fd3a74b786578e90f", size = 4642235, upload-time = "2025-09-16T09:18:56.095Z" }, + { url = "https://files.pythonhosted.org/packages/0d/93/a1b29c2452d15cecc4a39700fbf54721a3341f2ddbd1bd883f8ec0004e6e/grpcio-1.75.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:fa35ccd9501ffdd82b861809cbfc4b5b13f4b4c5dc3434d2d9170b9ed38a9054", size = 5661861, upload-time = "2025-09-16T09:18:58.748Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ce/7280df197e602d14594e61d1e60e89dfa734bb59a884ba86cdd39686aadb/grpcio-1.75.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0fcb77f2d718c1e58cc04ef6d3b51e0fa3b26cf926446e86c7eba105727b6cd4", size = 11459982, upload-time = "2025-09-16T09:19:01.211Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9b/37e61349771f89b543a0a0bbc960741115ea8656a2414bfb24c4de6f3dd7/grpcio-1.75.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:36764a4ad9dc1eb891042fab51e8cdf7cc014ad82cee807c10796fb708455041", size = 6239680, upload-time = "2025-09-16T09:19:04.443Z" }, + { url = "https://files.pythonhosted.org/packages/a6/66/f645d9d5b22ca307f76e71abc83ab0e574b5dfef3ebde4ec8b865dd7e93e/grpcio-1.75.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:725e67c010f63ef17fc052b261004942763c0b18dcd84841e6578ddacf1f9d10", size = 6908511, upload-time = "2025-09-16T09:19:07.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9a/34b11cd62d03c01b99068e257595804c695c3c119596c7077f4923295e19/grpcio-1.75.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:91fbfc43f605c5ee015c9056d580a70dd35df78a7bad97e05426795ceacdb59f", size = 6429105, upload-time = "2025-09-16T09:19:10.085Z" }, + { url = "https://files.pythonhosted.org/packages/1a/46/76eaceaad1f42c1e7e6a5b49a61aac40fc5c9bee4b14a1630f056ac3a57e/grpcio-1.75.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a9337ac4ce61c388e02019d27fa837496c4b7837cbbcec71b05934337e51531", size = 7060578, upload-time = "2025-09-16T09:19:12.283Z" }, + { url = "https://files.pythonhosted.org/packages/3d/82/181a0e3f1397b6d43239e95becbeb448563f236c0db11ce990f073b08d01/grpcio-1.75.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ee16e232e3d0974750ab5f4da0ab92b59d6473872690b5e40dcec9a22927f22e", size = 8003283, upload-time = "2025-09-16T09:19:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/de/09/a335bca211f37a3239be4b485e3c12bf3da68d18b1f723affdff2b9e9680/grpcio-1.75.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55dfb9122973cc69520b23d39867726722cafb32e541435707dc10249a1bdbc6", size = 7460319, upload-time = "2025-09-16T09:19:18.409Z" }, + { url = "https://files.pythonhosted.org/packages/aa/59/6330105cdd6bc4405e74c96838cd7e148c3653ae3996e540be6118220c79/grpcio-1.75.0-cp312-cp312-win32.whl", hash = "sha256:fb64dd62face3d687a7b56cd881e2ea39417af80f75e8b36f0f81dfd93071651", size = 3934011, upload-time = "2025-09-16T09:19:21.013Z" }, + { url = "https://files.pythonhosted.org/packages/ff/14/e1309a570b7ebdd1c8ca24c4df6b8d6690009fa8e0d997cb2c026ce850c9/grpcio-1.75.0-cp312-cp312-win_amd64.whl", hash = "sha256:6b365f37a9c9543a9e91c6b4103d68d38d5bcb9965b11d5092b3c157bd6a5ee7", size = 4637934, upload-time = "2025-09-16T09:19:23.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/64/dbce0ffb6edaca2b292d90999dd32a3bd6bc24b5b77618ca28440525634d/grpcio-1.75.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:1bb78d052948d8272c820bb928753f16a614bb2c42fbf56ad56636991b427518", size = 5666860, upload-time = "2025-09-16T09:19:25.417Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e6/da02c8fa882ad3a7f868d380bb3da2c24d35dd983dd12afdc6975907a352/grpcio-1.75.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:9dc4a02796394dd04de0b9673cb79a78901b90bb16bf99ed8cb528c61ed9372e", size = 11455148, upload-time = "2025-09-16T09:19:28.615Z" }, + { url = "https://files.pythonhosted.org/packages/ba/a0/84f87f6c2cf2a533cfce43b2b620eb53a51428ec0c8fe63e5dd21d167a70/grpcio-1.75.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:437eeb16091d31498585d73b133b825dc80a8db43311e332c08facf820d36894", size = 6243865, upload-time = "2025-09-16T09:19:31.342Z" }, + { url = "https://files.pythonhosted.org/packages/be/12/53da07aa701a4839dd70d16e61ce21ecfcc9e929058acb2f56e9b2dd8165/grpcio-1.75.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:c2c39984e846bd5da45c5f7bcea8fafbe47c98e1ff2b6f40e57921b0c23a52d0", size = 6915102, upload-time = "2025-09-16T09:19:33.658Z" }, + { url = "https://files.pythonhosted.org/packages/5b/c0/7eaceafd31f52ec4bf128bbcf36993b4bc71f64480f3687992ddd1a6e315/grpcio-1.75.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38d665f44b980acdbb2f0e1abf67605ba1899f4d2443908df9ec8a6f26d2ed88", size = 6432042, upload-time = "2025-09-16T09:19:36.583Z" }, + { url = "https://files.pythonhosted.org/packages/6b/12/a2ce89a9f4fc52a16ed92951f1b05f53c17c4028b3db6a4db7f08332bee8/grpcio-1.75.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2e8e752ab5cc0a9c5b949808c000ca7586223be4f877b729f034b912364c3964", size = 7062984, upload-time = "2025-09-16T09:19:39.163Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/a6/2642a9b491e24482d5685c0f45c658c495a5499b43394846677abed2c966/grpcio-1.75.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3a6788b30aa8e6f207c417874effe3f79c2aa154e91e78e477c4825e8b431ce0", size = 8001212, upload-time = "2025-09-16T09:19:41.726Z" }, + { url = "https://files.pythonhosted.org/packages/19/20/530d4428750e9ed6ad4254f652b869a20a40a276c1f6817b8c12d561f5ef/grpcio-1.75.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc33e67cab6141c54e75d85acd5dec616c5095a957ff997b4330a6395aa9b51", size = 7457207, upload-time = "2025-09-16T09:19:44.368Z" }, + { url = "https://files.pythonhosted.org/packages/e2/6f/843670007e0790af332a21468d10059ea9fdf97557485ae633b88bd70efc/grpcio-1.75.0-cp313-cp313-win32.whl", hash = "sha256:c8cfc780b7a15e06253aae5f228e1e84c0d3c4daa90faf5bc26b751174da4bf9", size = 3934235, upload-time = "2025-09-16T09:19:46.815Z" }, + { url = "https://files.pythonhosted.org/packages/4b/92/c846b01b38fdf9e2646a682b12e30a70dc7c87dfe68bd5e009ee1501c14b/grpcio-1.75.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c91d5b16eff3cbbe76b7a1eaaf3d91e7a954501e9d4f915554f87c470475c3d", size = 4637558, upload-time = "2025-09-16T09:19:49.698Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.71.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/ad/e74a4d1cffff628c2ef1ec5b9944fb098207cc4af6eb8db4bc52e6d99236/grpcio_tools-1.71.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:ab8a28c2e795520d6dc6ffd7efaef4565026dbf9b4f5270de2f3dd1ce61d2318", size = 2385557, 
upload-time = "2025-06-28T04:20:38.833Z" }, + { url = "https://files.pythonhosted.org/packages/63/bf/30b63418279d6fdc4fd4a3781a7976c40c7e8ee052333b9ce6bd4ce63f30/grpcio_tools-1.71.2-cp310-cp310-macosx_10_14_universal2.whl", hash = "sha256:654ecb284a592d39a85556098b8c5125163435472a20ead79b805cf91814b99e", size = 5446915, upload-time = "2025-06-28T04:20:40.947Z" }, + { url = "https://files.pythonhosted.org/packages/83/cd/2994e0a0a67714fdb00c207c4bec60b9b356fbd6b0b7a162ecaabe925155/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b49aded2b6c890ff690d960e4399a336c652315c6342232c27bd601b3705739e", size = 2348301, upload-time = "2025-06-28T04:20:42.766Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8b/4f2315927af306af1b35793b332b9ca9dc5b5a2cde2d55811c9577b5f03f/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7811a6fc1c4b4e5438e5eb98dbd52c2dc4a69d1009001c13356e6636322d41a", size = 2742159, upload-time = "2025-06-28T04:20:44.206Z" }, + { url = "https://files.pythonhosted.org/packages/8d/98/d513f6c09df405c82583e7083c20718ea615ed0da69ec42c80ceae7ebdc5/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393a9c80596aa2b3f05af854e23336ea8c295593bbb35d9adae3d8d7943672bd", size = 2473444, upload-time = "2025-06-28T04:20:45.5Z" }, + { url = "https://files.pythonhosted.org/packages/fa/fe/00af17cc841916d5e4227f11036bf443ce006629212c876937c7904b0ba3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:823e1f23c12da00f318404c4a834bb77cd150d14387dee9789ec21b335249e46", size = 2850339, upload-time = "2025-06-28T04:20:46.758Z" }, + { url = "https://files.pythonhosted.org/packages/7d/59/745fc50dfdbed875fcfd6433883270d39d23fb1aa4ecc9587786f772dce3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9bfbea79d6aec60f2587133ba766ede3dc3e229641d1a1e61d790d742a3d19eb", size = 3300795, upload-time = "2025-06-28T04:20:48.327Z" }, 
+ { url = "https://files.pythonhosted.org/packages/62/3e/d9d0fb2df78e601c28d02ef0cd5d007f113c1b04fc21e72bf56e8c3df66b/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:32f3a67b10728835b5ffb63fbdbe696d00e19a27561b9cf5153e72dbb93021ba", size = 2913729, upload-time = "2025-06-28T04:20:49.641Z" }, + { url = "https://files.pythonhosted.org/packages/09/ae/ddb264b4a10c6c10336a7c177f8738b230c2c473d0c91dd5d8ce8ea1b857/grpcio_tools-1.71.2-cp310-cp310-win32.whl", hash = "sha256:7fcf9d92c710bfc93a1c0115f25e7d49a65032ff662b38b2f704668ce0a938df", size = 945997, upload-time = "2025-06-28T04:20:50.9Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8d/5efd93698fe359f63719d934ebb2d9337e82d396e13d6bf00f4b06793e37/grpcio_tools-1.71.2-cp310-cp310-win_amd64.whl", hash = "sha256:914b4275be810290266e62349f2d020bb7cc6ecf9edb81da3c5cddb61a95721b", size = 1117474, upload-time = "2025-06-28T04:20:52.54Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/0568d38b8da6237ea8ea15abb960fb7ab83eb7bb51e0ea5926dab3d865b1/grpcio_tools-1.71.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:0acb8151ea866be5b35233877fbee6445c36644c0aa77e230c9d1b46bf34b18b", size = 2385557, upload-time = "2025-06-28T04:20:54.323Z" }, + { url = "https://files.pythonhosted.org/packages/76/fb/700d46f72b0f636cf0e625f3c18a4f74543ff127471377e49a071f64f1e7/grpcio_tools-1.71.2-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:b28f8606f4123edb4e6da281547465d6e449e89f0c943c376d1732dc65e6d8b3", size = 5447590, upload-time = "2025-06-28T04:20:55.836Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/d9bb2aec3de305162b23c5c884b9f79b1a195d42b1e6dabcc084cc9d0804/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:cbae6f849ad2d1f5e26cd55448b9828e678cb947fa32c8729d01998238266a6a", size = 2348495, upload-time = "2025-06-28T04:20:57.33Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/83/f840aba1690461b65330efbca96170893ee02fae66651bcc75f28b33a46c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4d1027615cfb1e9b1f31f2f384251c847d68c2f3e025697e5f5c72e26ed1316", size = 2742333, upload-time = "2025-06-28T04:20:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/30/34/c02cd9b37de26045190ba665ee6ab8597d47f033d098968f812d253bbf8c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bac95662dc69338edb9eb727cc3dd92342131b84b12b3e8ec6abe973d4cbf1b", size = 2473490, upload-time = "2025-06-28T04:21:00.614Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c7/375718ae091c8f5776828ce97bdcb014ca26244296f8b7f70af1a803ed2f/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c50250c7248055040f89eb29ecad39d3a260a4b6d3696af1575945f7a8d5dcdc", size = 2850333, upload-time = "2025-06-28T04:21:01.95Z" }, + { url = "https://files.pythonhosted.org/packages/19/37/efc69345bd92a73b2bc80f4f9e53d42dfdc234b2491ae58c87da20ca0ea5/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6ab1ad955e69027ef12ace4d700c5fc36341bdc2f420e87881e9d6d02af3d7b8", size = 3300748, upload-time = "2025-06-28T04:21:03.451Z" }, + { url = "https://files.pythonhosted.org/packages/d2/1f/15f787eb25ae42086f55ed3e4260e85f385921c788debf0f7583b34446e3/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd75dde575781262b6b96cc6d0b2ac6002b2f50882bf5e06713f1bf364ee6e09", size = 2913178, upload-time = "2025-06-28T04:21:04.879Z" }, + { url = "https://files.pythonhosted.org/packages/12/aa/69cb3a9dff7d143a05e4021c3c9b5cde07aacb8eb1c892b7c5b9fb4973e3/grpcio_tools-1.71.2-cp311-cp311-win32.whl", hash = "sha256:9a3cb244d2bfe0d187f858c5408d17cb0e76ca60ec9a274c8fd94cc81457c7fc", size = 946256, upload-time = "2025-06-28T04:21:06.518Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/df/fb951c5c87eadb507a832243942e56e67d50d7667b0e5324616ffd51b845/grpcio_tools-1.71.2-cp311-cp311-win_amd64.whl", hash = "sha256:00eb909997fd359a39b789342b476cbe291f4dd9c01ae9887a474f35972a257e", size = 1117661, upload-time = "2025-06-28T04:21:08.18Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" }, + { url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" }, + { url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8a/e4c1c4cb8c9ff7f50b7b2bba94abe8d1e98ea05f52a5db476e7f1c1a3c70/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a28eda8137d587eb30081384c256f5e5de7feda34776f89848b846da64e4be35", size = 2743321, upload-time = "2025-06-28T04:21:15.007Z" }, + { url = "https://files.pythonhosted.org/packages/fd/aa/95bc77fda5c2d56fb4a318c1b22bdba8914d5d84602525c99047114de531/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19c083198f5eb15cc69c0a2f2c415540cbc636bfe76cea268e5894f34023b40", size = 2474005, upload-time = "2025-06-28T04:21:16.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/ff/ca11f930fe1daa799ee0ce1ac9630d58a3a3deed3dd2f465edb9a32f299d/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:784c284acda0d925052be19053d35afbf78300f4d025836d424cf632404f676a", size = 2851559, upload-time = "2025-06-28T04:21:18.139Z" }, + { url = "https://files.pythonhosted.org/packages/64/10/c6fc97914c7e19c9bb061722e55052fa3f575165da9f6510e2038d6e8643/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:381e684d29a5d052194e095546eef067201f5af30fd99b07b5d94766f44bf1ae", size = 3300622, upload-time = "2025-06-28T04:21:20.291Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d6/965f36cfc367c276799b730d5dd1311b90a54a33726e561393b808339b04/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3e4b4801fabd0427fc61d50d09588a01b1cfab0ec5e8a5f5d515fbdd0891fd11", size = 2913863, upload-time = "2025-06-28T04:21:22.196Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f0/c05d5c3d0c1d79ac87df964e9d36f1e3a77b60d948af65bec35d3e5c75a3/grpcio_tools-1.71.2-cp312-cp312-win32.whl", hash = "sha256:84ad86332c44572305138eafa4cc30040c9a5e81826993eae8227863b700b490", size = 945744, upload-time = "2025-06-28T04:21:23.463Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e9/c84c1078f0b7af7d8a40f5214a9bdd8d2a567ad6c09975e6e2613a08d29d/grpcio_tools-1.71.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e1108d37eecc73b1c4a27350a6ed921b5dda25091700c1da17cfe30761cd462", size = 1117695, upload-time = "2025-06-28T04:21:25.22Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/bdf9c5055a1ad0a09123402d73ecad3629f75b9cf97828d547173b328891/grpcio_tools-1.71.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:b0f0a8611614949c906e25c225e3360551b488d10a366c96d89856bcef09f729", size = 2384758, upload-time = "2025-06-28T04:21:26.712Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/d0/6aaee4940a8fb8269c13719f56d69c8d39569bee272924086aef81616d4a/grpcio_tools-1.71.2-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:7931783ea7ac42ac57f94c5047d00a504f72fbd96118bf7df911bb0e0435fc0f", size = 5443127, upload-time = "2025-06-28T04:21:28.383Z" }, + { url = "https://files.pythonhosted.org/packages/d9/11/50a471dcf301b89c0ed5ab92c533baced5bd8f796abfd133bbfadf6b60e5/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:d188dc28e069aa96bb48cb11b1338e47ebdf2e2306afa58a8162cc210172d7a8", size = 2349627, upload-time = "2025-06-28T04:21:30.254Z" }, + { url = "https://files.pythonhosted.org/packages/bb/66/e3dc58362a9c4c2fbe98a7ceb7e252385777ebb2bbc7f42d5ab138d07ace/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f36c4b3cc42ad6ef67430639174aaf4a862d236c03c4552c4521501422bfaa26", size = 2742932, upload-time = "2025-06-28T04:21:32.325Z" }, + { url = "https://files.pythonhosted.org/packages/b7/1e/1e07a07ed8651a2aa9f56095411198385a04a628beba796f36d98a5a03ec/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bd9ed12ce93b310f0cef304176049d0bc3b9f825e9c8c6a23e35867fed6affd", size = 2473627, upload-time = "2025-06-28T04:21:33.752Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f9/3b7b32e4acb419f3a0b4d381bc114fe6cd48e3b778e81273fc9e4748caad/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7ce27e76dd61011182d39abca38bae55d8a277e9b7fe30f6d5466255baccb579", size = 2850879, upload-time = "2025-06-28T04:21:35.241Z" }, + { url = "https://files.pythonhosted.org/packages/1e/99/cd9e1acd84315ce05ad1fcdfabf73b7df43807cf00c3b781db372d92b899/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:dcc17bf59b85c3676818f2219deacac0156492f32ca165e048427d2d3e6e1157", size = 3300216, upload-time = "2025-06-28T04:21:36.826Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/c0/66eab57b14550c5b22404dbf60635c9e33efa003bd747211981a9859b94b/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:706360c71bdd722682927a1fb517c276ccb816f1e30cb71f33553e5817dc4031", size = 2913521, upload-time = "2025-06-28T04:21:38.347Z" }, + { url = "https://files.pythonhosted.org/packages/05/9b/7c90af8f937d77005625d705ab1160bc42a7e7b021ee5c788192763bccd6/grpcio_tools-1.71.2-cp313-cp313-win32.whl", hash = "sha256:bcf751d5a81c918c26adb2d6abcef71035c77d6eb9dd16afaf176ee096e22c1d", size = 945322, upload-time = "2025-06-28T04:21:39.864Z" }, + { url = "https://files.pythonhosted.org/packages/5f/80/6db6247f767c94fe551761772f89ceea355ff295fd4574cb8efc8b2d1199/grpcio_tools-1.71.2-cp313-cp313-win_amd64.whl", hash = "sha256:b1581a1133552aba96a730178bc44f6f1a071f0eb81c5b6bc4c0f89f5314e2b8", size = 1117234, upload-time = "2025-06-28T04:21:41.893Z" }, +] + [[package]] name = "grpclib" version = "0.4.7" @@ -1579,6 +1683,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, ] +[[package]] +name = "hatchet-sdk" +version = "1.18.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "prometheus-client" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-dateutil" }, + { name = "tenacity" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/10/5a2cf5c6d61bdc7244ffaae8d89a095732fbce0e219ddb506b4f186f931a/hatchet_sdk-1.18.1.tar.gz", hash = "sha256:2d73a26330eaa6aba41095ebfd0d9259f01f1eb611c128ca96e54efa1122c19f", size = 344794, upload-time = "2025-08-26T21:25:17.165Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/fb/1c/40baabcfc01560027d8ec7bd8b0924f535c56ae888a1a7c4341557aaf0f6/hatchet_sdk-1.18.1-py3-none-any.whl", hash = "sha256:13716ba69725ad180cd456c51ac9bf7836a0c57e1c7f785e8a09cdffa1ccff1a", size = 857997, upload-time = "2025-08-26T21:25:15.506Z" }, +] + [[package]] name = "hf-xet" version = "1.1.3" @@ -3043,6 +3168,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/dd/f0183ed0145e58cf9d286c1b2c14f63ccee987a4ff79ac85acc31b5d86bd/primp-0.15.0-cp38-abi3-win_amd64.whl", hash = "sha256:aeb6bd20b06dfc92cfe4436939c18de88a58c640752cf7f30d9e4ae893cdec32", size = 3149967, upload-time = "2025-04-17T11:41:07.067Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" }, +] + [[package]] name = "prompt-toolkit" version = "3.0.50" @@ -3146,16 +3280,16 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.3" +version = "5.29.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", size = 424945, upload-time = "2025-01-08T21:38:51.572Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708, upload-time = "2025-01-08T21:38:31.799Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/aae8e10512b83de633f2646506a6d835b151edf4b30d18d73afd01447253/protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a", size = 434508, upload-time = "2025-01-08T21:38:35.489Z" }, - { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825, upload-time = "2025-01-08T21:38:36.642Z" }, - { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573, upload-time = "2025-01-08T21:38:37.896Z" }, - { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672, upload-time = "2025-01-08T21:38:40.204Z" }, - { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = 
"sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550, upload-time = "2025-01-08T21:38:50.439Z" }, + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, ] [[package]] @@ -3337,6 +3471,9 
@@ dbos = [ examples = [ { name = "pydantic-ai-examples" }, ] +hatchet = [ + { name = "pydantic-ai-slim", extra = ["hatchet"] }, +] [package.dev-dependencies] dev = [ @@ -3387,8 +3524,9 @@ requires-dist = [ { name = "pydantic-ai-examples", marker = "extra == 'examples'", editable = "examples" }, { name = "pydantic-ai-slim", extras = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "vertexai"], editable = "pydantic_ai_slim" }, { name = "pydantic-ai-slim", extras = ["dbos"], marker = "extra == 'dbos'", editable = "pydantic_ai_slim" }, + { name = "pydantic-ai-slim", extras = ["hatchet"], marker = "extra == 'hatchet'", editable = "pydantic_ai_slim" }, ] -provides-extras = ["a2a", "dbos", "examples"] +provides-extras = ["a2a", "dbos", "examples", "hatchet"] [package.metadata.requires-dev] dev = [ @@ -3525,6 +3663,9 @@ google = [ groq = [ { name = "groq" }, ] +hatchet = [ + { name = "hatchet-sdk" }, +] huggingface = [ { name = "huggingface-hub", extra = ["inference"] }, ] @@ -3570,6 +3711,7 @@ requires-dist = [ { name = "google-genai", marker = "extra == 'google'", specifier = ">=1.31.0" }, { name = "griffe", specifier = ">=1.3.2" }, { name = "groq", marker = "extra == 'groq'", specifier = ">=0.25.0" }, + { name = "hatchet-sdk", marker = "extra == 'hatchet'", specifier = ">=1.18.1" }, { name = "httpx", specifier = ">=0.27" }, { name = "huggingface-hub", extras = ["inference"], marker = "extra == 'huggingface'", specifier = ">=0.33.5" }, { name = "logfire", extras = ["httpx"], marker = "extra == 'logfire'", specifier = ">=3.14.1" }, @@ -3590,7 +3732,7 @@ requires-dist = [ { name = "tenacity", marker = "extra == 'retries'", specifier = ">=8.2.3" }, { name = "typing-inspection", specifier = ">=0.4.0" }, ] -provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "huggingface", "logfire", "mcp", 
"mistral", "openai", "retries", "tavily", "temporal", "vertexai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "hatchet", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "tavily", "temporal", "vertexai"] [[package]] name = "pydantic-core" @@ -4427,6 +4569,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/91/a43308dc82a0e32d80cd0dfdcfca401ecbd0f431ab45f24e48bb97b7800d/sentry_sdk-2.35.2-py2.py3-none-any.whl", hash = "sha256:38c98e3cbb620dd3dd80a8d6e39c753d453dd41f8a9df581b0584c19a52bc926", size = 363975, upload-time = "2025-09-01T11:00:56.574Z" }, ] +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" From d3d818aac5d3d7aced1c770121baa362fd337e45 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 13:17:37 -0400 Subject: [PATCH 04/42] feat: first pass at model impl --- .../durable_exec/hatchet/_agent.py | 14 ++--- .../durable_exec/hatchet/_model.py | 55 ++++++++++++++++--- .../durable_exec/hatchet/_utils.py | 3 +- pydantic_ai_slim/pyproject.toml | 2 +- uv.lock | 12 ++-- 5 files changed, 61 insertions(+), 25 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 3b759a3217..53816b31f7 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -1,5 +1,7 @@ from __future__ import annotations +from hatchet_sdk import Hatchet + from pydantic_ai.agent import AbstractAgent, WrapperAgent from pydantic_ai.output import OutputDataT from pydantic_ai.tools import ( @@ -11,18 +13,16 @@ class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): def __init__( self, wrapped: AbstractAgent[AgentDepsT, OutputDataT], + hatchet: Hatchet, ): - """Wrap an agent to enable it with DBOS durable workflows, by automatically offloading model requests, tool calls, and MCP server communication to DBOS steps. + """Wrap an agent to enable it with Hatchet durable tasks, by automatically offloading model requests, tool calls, and MCP server communication to Hatchet tasks. - After wrapping, the original agent can still be used as normal outside of the DBOS workflow. + After wrapping, the original agent can still be used as normal outside of the Hatchet workflow. Args: wrapped: The agent to wrap. - name: Optional unique agent name to use as the DBOS configured instance name. If not provided, the agent's `name` will be used. - event_stream_handler: Optional event stream handler to use instead of the one set on the wrapped agent. - mcp_step_config: The base DBOS step config to use for MCP server steps. If no config is provided, use the default settings of DBOS. - model_step_config: The DBOS step config to use for model request steps. If no config is provided, use the default settings of DBOS. + hatchet: The Hatchet instance to use for creating tasks. 
""" super().__init__(wrapped) - pass + self.hatchet = hatchet diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 4ed03d5474..92f3328342 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,16 +1,57 @@ from __future__ import annotations -from pydantic_ai.models import Model +from hatchet_sdk import DurableContext, Hatchet +from pydantic import BaseModel + +from pydantic_ai.messages import ( + ModelMessage, + ModelResponse, +) +from pydantic_ai.models import Model, ModelRequestParameters from pydantic_ai.models.wrapper import WrapperModel +from pydantic_ai.settings import ModelSettings + +from ._utils import TaskConfig class HatchetModel(WrapperModel): - """A wrapper for Model that integrates with DBOS, turning request and request_stream to DBOS steps.""" + """A wrapper for Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" - def __init__( - self, - model: Model, - ): + def __init__(self, model: Model, *, task_config: TaskConfig, hatchet: Hatchet): super().__init__(model) + self.task_config = task_config + self.hatchet = hatchet + + class ModelInput(BaseModel): + messages: list[ModelMessage] + model_settings: ModelSettings | None + model_request_parameters: ModelRequestParameters + + @hatchet.durable_task( + name=self.task_config.name, + description=self.task_config.description, + input_validator=ModelInput, + on_events=self.task_config.on_events, + on_crons=self.task_config.on_crons, + version=self.task_config.version, + sticky=self.task_config.sticky, + default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + 
desired_worker_labels=self.task_config.desired_worker_labels, + backoff_factor=self.task_config.backoff_factor, + backoff_max_seconds=self.task_config.backoff_max_seconds, + default_filters=self.task_config.default_filters, + ) + async def wrapped_request_task( + input: ModelInput, + _ctx: DurableContext, + ) -> ModelResponse: + return await super(HatchetModel, self).request( + input.messages, input.model_settings, input.model_request_parameters + ) - pass + self._hatchet_wrapped_request_task = wrapped_request_task diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py index d34f683461..5594722a6d 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py @@ -10,9 +10,8 @@ class TaskConfig(BaseModel): - name: str | None = None + name: str description: str | None = None - input_validator: None = None on_events: list[str] | None = None on_crons: list[str] | None = None version: str | None = None diff --git a/pydantic_ai_slim/pyproject.toml b/pydantic_ai_slim/pyproject.toml index 7dfb544256..6ebc67be63 100644 --- a/pydantic_ai_slim/pyproject.toml +++ b/pydantic_ai_slim/pyproject.toml @@ -101,7 +101,7 @@ temporal = ["temporalio==1.17.0"] # DBOS dbos = ["dbos>=1.13.0"] # Hatchet -hatchet = ["hatchet-sdk>=1.18.1"] +hatchet = ["hatchet-sdk @ git+https://github.com/hatchet-dev/hatchet.git@mk/tweaks-for-pydantic-ai#subdirectory=sdks/python"] [tool.hatch.metadata] allow-direct-references = true diff --git a/uv.lock b/uv.lock index 2f053c74c3..ead8323df6 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1685,8 +1685,8 @@ wheels = [ [[package]] name = "hatchet-sdk" -version = "1.18.1" -source = { registry = "https://pypi.org/simple" } +version = 
"1.18.2" +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#41013c3d474c3e509f723948f84ade27f158373b" } dependencies = [ { name = "aiohttp" }, { name = "grpcio" }, @@ -1699,10 +1699,6 @@ dependencies = [ { name = "tenacity" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6c/10/5a2cf5c6d61bdc7244ffaae8d89a095732fbce0e219ddb506b4f186f931a/hatchet_sdk-1.18.1.tar.gz", hash = "sha256:2d73a26330eaa6aba41095ebfd0d9259f01f1eb611c128ca96e54efa1122c19f", size = 344794, upload-time = "2025-08-26T21:25:17.165Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/1c/40baabcfc01560027d8ec7bd8b0924f535c56ae888a1a7c4341557aaf0f6/hatchet_sdk-1.18.1-py3-none-any.whl", hash = "sha256:13716ba69725ad180cd456c51ac9bf7836a0c57e1c7f785e8a09cdffa1ccff1a", size = 857997, upload-time = "2025-08-26T21:25:15.506Z" }, -] [[package]] name = "hf-xet" @@ -3711,7 +3707,7 @@ requires-dist = [ { name = "google-genai", marker = "extra == 'google'", specifier = ">=1.31.0" }, { name = "griffe", specifier = ">=1.3.2" }, { name = "groq", marker = "extra == 'groq'", specifier = ">=0.25.0" }, - { name = "hatchet-sdk", marker = "extra == 'hatchet'", specifier = ">=1.18.1" }, + { name = "hatchet-sdk", marker = "extra == 'hatchet'", git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai" }, { name = "httpx", specifier = ">=0.27" }, { name = "huggingface-hub", extras = ["inference"], marker = "extra == 'huggingface'", specifier = ">=0.33.5" }, { name = "logfire", extras = ["httpx"], marker = "extra == 'logfire'", specifier = ">=3.14.1" }, From 16f6e6a7a45d7194e97ea80def0d49a275cf53a7 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 13:47:24 -0400 Subject: [PATCH 05/42] feat: tool calling implementation for mcp server --- .../durable_exec/hatchet/_mcp_server.py | 113 ++++++++++++++++-- uv.lock | 2 +- 2 files changed, 106 
insertions(+), 9 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 8bcdeee8c2..91f0276324 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -1,22 +1,119 @@ from __future__ import annotations from abc import ABC -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Generic, TypeVar -from pydantic_ai.tools import AgentDepsT +from hatchet_sdk import DurableContext, Hatchet +from pydantic import BaseModel + +from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.toolsets.abstract import ToolsetTool from pydantic_ai.toolsets.wrapper import WrapperToolset +from ._utils import TaskConfig + if TYPE_CHECKING: - from pydantic_ai.mcp import MCPServer + from pydantic_ai.mcp import MCPServer, ToolResult + +T = TypeVar('T') + + +class GetToolsInput(BaseModel, Generic[AgentDepsT]): + ctx: RunContext[AgentDepsT] + + +class CallToolInput(BaseModel, Generic[AgentDepsT]): + name: str + tool_args: dict[str, Any] + ctx: RunContext[AgentDepsT] + tool: ToolsetTool[AgentDepsT] + + +class CallToolOutput(BaseModel): + result: ToolResult class HatchetMCPServer(WrapperToolset[AgentDepsT], ABC): """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" - def __init__( - self, - wrapped: MCPServer, - ): + def __init__(self, wrapped: MCPServer, *, task_config: TaskConfig, hatchet: Hatchet): super().__init__(wrapped) + self.task_config = task_config + self.hatchet = hatchet + + @hatchet.durable_task( + name=f'{self.task_config.name}.get_tools', + description=self.task_config.description, + input_validator=GetToolsInput[AgentDepsT], + on_events=self.task_config.on_events, + on_crons=self.task_config.on_crons, + version=self.task_config.version, + sticky=self.task_config.sticky, + 
default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + desired_worker_labels=self.task_config.desired_worker_labels, + backoff_factor=self.task_config.backoff_factor, + backoff_max_seconds=self.task_config.backoff_max_seconds, + default_filters=self.task_config.default_filters, + ) + async def wrapped_get_tools_task( + input: GetToolsInput[AgentDepsT], + ctx: DurableContext, + ) -> dict[str, ToolsetTool[AgentDepsT]]: + return await super(HatchetMCPServer, self).get_tools(input.ctx) + + self._hatchet_wrapped_get_tools_task = wrapped_get_tools_task + + @hatchet.durable_task( + name=f'{self.task_config.name}.get_tools', + description=self.task_config.description, + input_validator=CallToolInput[AgentDepsT], + on_events=self.task_config.on_events, + on_crons=self.task_config.on_crons, + version=self.task_config.version, + sticky=self.task_config.sticky, + default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + desired_worker_labels=self.task_config.desired_worker_labels, + backoff_factor=self.task_config.backoff_factor, + backoff_max_seconds=self.task_config.backoff_max_seconds, + default_filters=self.task_config.default_filters, + ) + async def wrapped_call_tool_task( + input: CallToolInput[AgentDepsT], + _ctx: DurableContext, + ) -> CallToolOutput[AgentDepsT]: + result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, input.ctx, input.tool) + + return CallToolOutput[AgentDepsT](result=result) + + self._hatchet_wrapped_call_tool_task = wrapped_call_tool_task + + async def 
get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: + return await self._hatchet_wrapped_get_tools_task.aio_run(GetToolsInput(ctx=ctx)) + + async def call_tool( + self, + name: str, + tool_args: dict[str, Any], + ctx: RunContext[AgentDepsT], + tool: ToolsetTool[AgentDepsT], + ) -> ToolResult: + wrapped_tool_output = await self._hatchet_wrapped_call_tool_task.aio_run( + CallToolInput( + name=name, + tool_args=tool_args, + ctx=ctx, + tool=tool, + ) + ) - pass + return wrapped_tool_output.result diff --git a/uv.lock b/uv.lock index ead8323df6..c214c2d2b9 100644 --- a/uv.lock +++ b/uv.lock @@ -1686,7 +1686,7 @@ wheels = [ [[package]] name = "hatchet-sdk" version = "1.18.2" -source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#41013c3d474c3e509f723948f84ade27f158373b" } +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#4962eeb0f7463f731a886c901432bea412f4d7fa" } dependencies = [ { name = "aiohttp" }, { name = "grpcio" }, From 698dfdfe1634bccb3a2162f44987bff968feaa2e Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 13:48:49 -0400 Subject: [PATCH 06/42] feat: implement request method for model --- .../durable_exec/hatchet/_model.py | 25 +++++++++++++++---- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 92f3328342..a337155f7c 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -14,6 +14,12 @@ from ._utils import TaskConfig +class ModelInput(BaseModel): + messages: list[ModelMessage] + model_settings: ModelSettings | None + model_request_parameters: ModelRequestParameters + + class HatchetModel(WrapperModel): """A wrapper for Model that integrates with Hatchet, 
turning request and request_stream to Hatchet tasks.""" @@ -22,11 +28,6 @@ def __init__(self, model: Model, *, task_config: TaskConfig, hatchet: Hatchet): self.task_config = task_config self.hatchet = hatchet - class ModelInput(BaseModel): - messages: list[ModelMessage] - model_settings: ModelSettings | None - model_request_parameters: ModelRequestParameters - @hatchet.durable_task( name=self.task_config.name, description=self.task_config.description, @@ -55,3 +56,17 @@ async def wrapped_request_task( ) self._hatchet_wrapped_request_task = wrapped_request_task + + async def request( + self, + messages: list[ModelMessage], + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + ) -> ModelResponse: + return await self._hatchet_wrapped_request_task.aio_run( + ModelInput( + messages=messages, + model_settings=model_settings, + model_request_parameters=model_request_parameters, + ) + ) From db0d2ff7130630751e2d656fd8e2d73bdef72656 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 14:15:53 -0400 Subject: [PATCH 07/42] feat: more work on agent --- .../durable_exec/hatchet/_agent.py | 46 ++++++++++++- .../durable_exec/hatchet/_mcp_server.py | 69 +++++++++---------- .../durable_exec/hatchet/_model.py | 7 +- .../durable_exec/hatchet/_utils.py | 3 - 4 files changed, 82 insertions(+), 43 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 53816b31f7..b89c4c442a 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -3,10 +3,15 @@ from hatchet_sdk import Hatchet from pydantic_ai.agent import AbstractAgent, WrapperAgent +from pydantic_ai.exceptions import UserError from pydantic_ai.output import OutputDataT from pydantic_ai.tools import ( AgentDepsT, ) +from pydantic_ai.toolsets import AbstractToolset + +from ._model import HatchetModel +from 
._utils import TaskConfig class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): @@ -14,6 +19,10 @@ def __init__( self, wrapped: AbstractAgent[AgentDepsT, OutputDataT], hatchet: Hatchet, + *, + name: str | None = None, + mcp_task_config: TaskConfig | None = None, + model_task_config: TaskConfig | None = None, ): """Wrap an agent to enable it with Hatchet durable tasks, by automatically offloading model requests, tool calls, and MCP server communication to Hatchet tasks. @@ -25,4 +34,39 @@ def __init__( """ super().__init__(wrapped) - self.hatchet = hatchet + self._name = name or wrapped.name + self._hatchet = hatchet + + if not self._name: + raise UserError( + "An agent needs to have a unique `name` in order to be used with Hatchet. The name will be used to identify the agent's workflows and steps." + ) + + self._model = HatchetModel( + wrapped.model, + task_name_prefix=self._name, + task_config=model_task_config or TaskConfig(), + hatchet=self._hatchet, + ) + hatchet_agent_name = self._name + + def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: + # Replace MCPServer with HatchetMCPServer + try: + from pydantic_ai.mcp import MCPServer + + from ._mcp_server import HatchetMCPServer + except ImportError: + pass + else: + if isinstance(toolset, MCPServer): + return HatchetMCPServer[AgentDepsT]( + wrapped=toolset, + hatchet=hatchet, + task_name_prefix=hatchet_agent_name, + task_config=mcp_task_config or TaskConfig(), + ) + + return toolset + + self._toolsets = [toolset.visit_and_replace(hatchetify_toolset) for toolset in wrapped.toolsets] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 91f0276324..3a013328af 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -36,29 +36,30 @@ class CallToolOutput(BaseModel): class 
HatchetMCPServer(WrapperToolset[AgentDepsT], ABC): """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" - def __init__(self, wrapped: MCPServer, *, task_config: TaskConfig, hatchet: Hatchet): + def __init__(self, wrapped: MCPServer, *, hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig): super().__init__(wrapped) - self.task_config = task_config - self.hatchet = hatchet + self._task_config = task_config + self._task_name_prefix = task_name_prefix + self._hatchet = hatchet + id_suffix = f'__{wrapped.id}' if wrapped.id else '' + self._name = f'{task_name_prefix}__mcp_server{id_suffix}' @hatchet.durable_task( - name=f'{self.task_config.name}.get_tools', - description=self.task_config.description, + name=f'{self._name}.get_tools', + description=self._task_config.description, input_validator=GetToolsInput[AgentDepsT], - on_events=self.task_config.on_events, - on_crons=self.task_config.on_crons, - version=self.task_config.version, - sticky=self.task_config.sticky, - default_priority=self.task_config.default_priority, - concurrency=self.task_config.concurrency, - schedule_timeout=self.task_config.schedule_timeout, - execution_timeout=self.task_config.execution_timeout, - retries=self.task_config.retries, - rate_limits=self.task_config.rate_limits, - desired_worker_labels=self.task_config.desired_worker_labels, - backoff_factor=self.task_config.backoff_factor, - backoff_max_seconds=self.task_config.backoff_max_seconds, - default_filters=self.task_config.default_filters, + version=self._task_config.version, + sticky=self._task_config.sticky, + default_priority=self._task_config.default_priority, + concurrency=self._task_config.concurrency, + schedule_timeout=self._task_config.schedule_timeout, + execution_timeout=self._task_config.execution_timeout, + retries=self._task_config.retries, + rate_limits=self._task_config.rate_limits, + desired_worker_labels=self._task_config.desired_worker_labels, + 
backoff_factor=self._task_config.backoff_factor, + backoff_max_seconds=self._task_config.backoff_max_seconds, + default_filters=self._task_config.default_filters, ) async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], @@ -69,23 +70,21 @@ async def wrapped_get_tools_task( self._hatchet_wrapped_get_tools_task = wrapped_get_tools_task @hatchet.durable_task( - name=f'{self.task_config.name}.get_tools', - description=self.task_config.description, + name=f'{self._name}.call_tool', + description=self._task_config.description, input_validator=CallToolInput[AgentDepsT], - on_events=self.task_config.on_events, - on_crons=self.task_config.on_crons, - version=self.task_config.version, - sticky=self.task_config.sticky, - default_priority=self.task_config.default_priority, - concurrency=self.task_config.concurrency, - schedule_timeout=self.task_config.schedule_timeout, - execution_timeout=self.task_config.execution_timeout, - retries=self.task_config.retries, - rate_limits=self.task_config.rate_limits, - desired_worker_labels=self.task_config.desired_worker_labels, - backoff_factor=self.task_config.backoff_factor, - backoff_max_seconds=self.task_config.backoff_max_seconds, - default_filters=self.task_config.default_filters, + version=self._task_config.version, + sticky=self._task_config.sticky, + default_priority=self._task_config.default_priority, + concurrency=self._task_config.concurrency, + schedule_timeout=self._task_config.schedule_timeout, + execution_timeout=self._task_config.execution_timeout, + retries=self._task_config.retries, + rate_limits=self._task_config.rate_limits, + desired_worker_labels=self._task_config.desired_worker_labels, + backoff_factor=self._task_config.backoff_factor, + backoff_max_seconds=self._task_config.backoff_max_seconds, + default_filters=self._task_config.default_filters, ) async def wrapped_call_tool_task( input: CallToolInput[AgentDepsT], diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index a337155f7c..b7a19ce0d4 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -23,17 +23,16 @@ class ModelInput(BaseModel): class HatchetModel(WrapperModel): """A wrapper for Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" - def __init__(self, model: Model, *, task_config: TaskConfig, hatchet: Hatchet): + def __init__(self, model: Model, *, task_name_prefix: str, task_config: TaskConfig, hatchet: Hatchet): super().__init__(model) self.task_config = task_config self.hatchet = hatchet + self._task_name_prefix = task_name_prefix @hatchet.durable_task( - name=self.task_config.name, + name=f'{self._task_name_prefix}__model.request', description=self.task_config.description, input_validator=ModelInput, - on_events=self.task_config.on_events, - on_crons=self.task_config.on_crons, version=self.task_config.version, sticky=self.task_config.sticky, default_priority=self.task_config.default_priority, diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py index 5594722a6d..8a459b6eba 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_utils.py @@ -10,10 +10,7 @@ class TaskConfig(BaseModel): - name: str description: str | None = None - on_events: list[str] | None = None - on_crons: list[str] | None = None version: str | None = None sticky: StickyStrategy | None = None default_priority: int = 1 From 3ae9fcfb2f32b9467da3e6a430a323d157b1c8ae Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 14:24:46 -0400 Subject: [PATCH 08/42] feat: agent run method impl --- .../durable_exec/hatchet/_agent.py | 166 +++++++++++++++++- 1 file changed, 161 insertions(+), 5 deletions(-) diff --git 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index b89c4c442a..abade65abf 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -1,12 +1,26 @@ from __future__ import annotations -from hatchet_sdk import Hatchet +from collections.abc import Iterator, Sequence +from contextlib import contextmanager +from typing import Any, Generic, overload -from pydantic_ai.agent import AbstractAgent, WrapperAgent +from hatchet_sdk import DurableContext, Hatchet +from pydantic import BaseModel, Field +from typing_extensions import Never + +from pydantic_ai import ( + messages as _messages, + models, + usage as _usage, +) +from pydantic_ai.agent import AbstractAgent, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent from pydantic_ai.exceptions import UserError -from pydantic_ai.output import OutputDataT +from pydantic_ai.models import Model +from pydantic_ai.output import OutputDataT, OutputSpec +from pydantic_ai.settings import ModelSettings from pydantic_ai.tools import ( AgentDepsT, + DeferredToolResults, ) from pydantic_ai.toolsets import AbstractToolset @@ -14,6 +28,22 @@ from ._utils import TaskConfig +class RunAgentInput(BaseModel, Generic[RunOutputDataT, AgentDepsT]): + user_prompt: str | Sequence[_messages.UserContent] | None = None + output_type: OutputSpec[RunOutputDataT] | None = None + message_history: list[_messages.ModelMessage] | None = None + deferred_tool_results: DeferredToolResults | None = None + model: models.Model | models.KnownModelName | str | None = None + deps: AgentDepsT + model_settings: ModelSettings | None = None + usage_limits: _usage.UsageLimits | None = None + usage: _usage.RunUsage | None = None + infer_name: bool = True + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None + 
deprecated_kwargs: dict[str, Any] = Field(default_factory=dict) + + class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): def __init__( self, @@ -39,7 +69,12 @@ def __init__( if not self._name: raise UserError( - "An agent needs to have a unique `name` in order to be used with DBOS. The name will be used to identify the agent's workflows and steps." + "An agent needs to have a unique `name` in order to be used with Hatchet. The name will be used to identify the agent's workflows and tasks." + ) + + if not isinstance(wrapped.model, Model): + raise UserError( + 'An agent needs to have a `model` in order to be used with Hatchet, it cannot be set at agent run time.' ) self._model = HatchetModel( @@ -51,7 +86,7 @@ def __init__( hatchet_agent_name = self._name def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: - # Replace MCPServer with DBOSMCPServer + # Replace MCPServer with HatchetMCPServer try: from pydantic_ai.mcp import MCPServer @@ -70,3 +105,124 @@ def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[ return toolset self._toolsets = [toolset.visit_and_replace(hatchetify_toolset) for toolset in wrapped.toolsets] + + @hatchet.durable_task(name=f'{self._name}.run', input_validator=RunAgentInput[Any, Any]) + async def wrapped_run_workflow( + input: RunAgentInput[RunOutputDataT, AgentDepsT], + ctx: DurableContext, + ) -> AgentRunResult[Any]: + with self._hatchet_overrides(): + return await super(WrapperAgent, self).run( + input.user_prompt, + output_type=input.output_type, + message_history=input.message_history, + deferred_tool_results=input.deferred_tool_results, + model=input.model, + deps=input.deps, + model_settings=input.model_settings, + usage_limits=input.usage_limits, + usage=input.usage, + infer_name=input.infer_name, + toolsets=input.toolsets, + event_stream_handler=input.event_stream_handler, + **input.deprecated_kwargs, + ) + + self.hatchet_wrapped_run_workflow = 
wrapped_run_workflow + + @property + def name(self) -> str | None: + return self._name + + @name.setter + def name(self, value: str | None) -> None: # pragma: no cover + raise UserError( + 'The agent name cannot be changed after creation. If you need to change the name, create a new agent.' + ) + + @property + def model(self) -> Model: + return self._model + + @property + def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: + with self._hatchet_overrides(): + return super().toolsets + + @contextmanager + def _hatchet_overrides(self) -> Iterator[None]: + # Override with HatchetModel and HatchetMCPServer in the toolsets. + with super().override(model=self._model, toolsets=self._toolsets, tools=[]): + yield + + @overload + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[OutputDataT]: ... 
+ + @overload + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[RunOutputDataT]: ... + + async def run( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AgentRunResult[Any]: + """Run the agent with a user prompt in async mode.""" + return await self.hatchet_wrapped_run_workflow.aio_run( + RunAgentInput[RunOutputDataT, AgentDepsT]( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + deprecated_kwargs=_deprecated_kwargs, + ) + ) From 
937aa97392475bcdd48d792f7efc49e3046eb16c Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 14:49:40 -0400 Subject: [PATCH 09/42] fix: allow arbitrary types in i/o models --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 4 +++- .../pydantic_ai/durable_exec/hatchet/_mcp_server.py | 8 +++++++- .../pydantic_ai/durable_exec/hatchet/_model.py | 4 +++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index abade65abf..024c968424 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -5,7 +5,7 @@ from typing import Any, Generic, overload from hatchet_sdk import DurableContext, Hatchet -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field from typing_extensions import Never from pydantic_ai import ( @@ -29,6 +29,8 @@ class RunAgentInput(BaseModel, Generic[RunOutputDataT, AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + user_prompt: str | Sequence[_messages.UserContent] | None = None output_type: OutputSpec[RunOutputDataT] | None = None message_history: list[_messages.ModelMessage] | None = None diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 3a013328af..ae10a2e99b 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar from hatchet_sdk import DurableContext, Hatchet -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from pydantic_ai.tools import AgentDepsT, RunContext from pydantic_ai.toolsets.abstract import ToolsetTool @@ -19,10 +19,14 @@ class GetToolsInput(BaseModel, Generic[AgentDepsT]): 
+ model_config = ConfigDict(arbitrary_types_allowed=True) + ctx: RunContext[AgentDepsT] class CallToolInput(BaseModel, Generic[AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + name: str tool_args: dict[str, Any] ctx: RunContext[AgentDepsT] @@ -30,6 +34,8 @@ class CallToolInput(BaseModel, Generic[AgentDepsT]): class CallToolOutput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + result: ToolResult diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index b7a19ce0d4..074af8d4bf 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,7 +1,7 @@ from __future__ import annotations from hatchet_sdk import DurableContext, Hatchet -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from pydantic_ai.messages import ( ModelMessage, @@ -15,6 +15,8 @@ class ModelInput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + messages: list[ModelMessage] model_settings: ModelSettings | None model_request_parameters: ModelRequestParameters From 27036c7f5e528f6bfd6dfe6aa9cf4633588fa038 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 17:32:33 -0400 Subject: [PATCH 10/42] fix: clean up types + workflow registration --- .../durable_exec/hatchet/_agent.py | 36 ++++++++++++++++--- .../durable_exec/hatchet/_mcp_server.py | 10 +++--- .../durable_exec/hatchet/_model.py | 4 +-- 3 files changed, 39 insertions(+), 11 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 024c968424..6fe888a17f 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -5,7 +5,8 @@ from typing import Any, Generic, overload from hatchet_sdk import DurableContext, 
Hatchet -from pydantic import BaseModel, ConfigDict, Field +from hatchet_sdk.runnables.workflow import BaseWorkflow +from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never from pydantic_ai import ( @@ -63,6 +64,9 @@ def __init__( Args: wrapped: The agent to wrap. hatchet: The Hatchet instance to use for creating tasks. + name: Optional unique agent name to use in the Hatchet tasks' names. If not provided, the agent's `name` will be used. + mcp_task_config: The base Hatchet task config to use for MCP server tasks. If no config is provided, use the default settings. + model_task_config: The Hatchet task config to use for model request tasks. If no config is provided, use the default settings. """ super().__init__(wrapped) @@ -111,7 +115,7 @@ def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[ @hatchet.durable_task(name=f'{self._name}.run', input_validator=RunAgentInput[Any, Any]) async def wrapped_run_workflow( input: RunAgentInput[RunOutputDataT, AgentDepsT], - ctx: DurableContext, + _ctx: DurableContext, ) -> AgentRunResult[Any]: with self._hatchet_overrides(): return await super(WrapperAgent, self).run( @@ -153,10 +157,29 @@ def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: @contextmanager def _hatchet_overrides(self) -> Iterator[None]: - # Override with HatchetModel and HatchetMCPServer in the toolsets. 
with super().override(model=self._model, toolsets=self._toolsets, tools=[]): yield + @property + def workflows(self) -> Sequence[BaseWorkflow[Any]]: + workflows = [ + self.hatchet_wrapped_run_workflow, + self._model._hatchet_wrapped_request_task, + ] + + for toolset in self._toolsets: + from ._mcp_server import HatchetMCPServer + + if isinstance(toolset, HatchetMCPServer): + workflows.extend( + [ + toolset.hatchet_wrapped_get_tools_task, + toolset.hatchet_wrapped_call_tool_task, + ] + ) + + return workflows + @overload async def run( self, @@ -211,7 +234,7 @@ async def run( **_deprecated_kwargs: Never, ) -> AgentRunResult[Any]: """Run the agent with a user prompt in async mode.""" - return await self.hatchet_wrapped_run_workflow.aio_run( + result = await self.hatchet_wrapped_run_workflow.aio_run( RunAgentInput[RunOutputDataT, AgentDepsT]( user_prompt=user_prompt, output_type=output_type, @@ -228,3 +251,8 @@ async def run( deprecated_kwargs=_deprecated_kwargs, ) ) + + if isinstance(result, dict): + return TypeAdapter(AgentRunResult[Any]).validate_python(result) + + return result diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index ae10a2e99b..ead577de9e 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -73,10 +73,10 @@ async def wrapped_get_tools_task( ) -> dict[str, ToolsetTool[AgentDepsT]]: return await super(HatchetMCPServer, self).get_tools(input.ctx) - self._hatchet_wrapped_get_tools_task = wrapped_get_tools_task + self.hatchet_wrapped_get_tools_task = wrapped_get_tools_task @hatchet.durable_task( - name=f'{self._name}.get_tools', + name=f'{self._name}.call_tool', description=self._task_config.description, input_validator=CallToolInput[AgentDepsT], version=self._task_config.version, @@ -100,10 +100,10 @@ async def wrapped_call_tool_task( return 
CallToolOutput[AgentDepsT](result=result) - self._hatchet_wrapped_call_tool_task = wrapped_call_tool_task + self.hatchet_wrapped_call_tool_task = wrapped_call_tool_task async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: - return await self._hatchet_wrapped_get_tools_task.aio_run(GetToolsInput(ctx=ctx)) + return await self.hatchet_wrapped_get_tools_task.aio_run(GetToolsInput(ctx=ctx)) async def call_tool( self, @@ -112,7 +112,7 @@ async def call_tool( ctx: RunContext[AgentDepsT], tool: ToolsetTool[AgentDepsT], ) -> ToolResult: - wrapped_tool_output = await self._hatchet_wrapped_call_tool_task.aio_run( + wrapped_tool_output = await self.hatchet_wrapped_call_tool_task.aio_run( CallToolInput( name=name, tool_args=tool_args, diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 074af8d4bf..fceef61594 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -56,7 +56,7 @@ async def wrapped_request_task( input.messages, input.model_settings, input.model_request_parameters ) - self._hatchet_wrapped_request_task = wrapped_request_task + self.hatchet_wrapped_request_task = wrapped_request_task async def request( self, @@ -64,7 +64,7 @@ async def request( model_settings: ModelSettings | None, model_request_parameters: ModelRequestParameters, ) -> ModelResponse: - return await self._hatchet_wrapped_request_task.aio_run( + return await self.hatchet_wrapped_request_task.aio_run( ModelInput( messages=messages, model_settings=model_settings, From f993b69caaaf2d7d5107156a35a762516beddce9 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Tue, 16 Sep 2025 18:35:47 -0400 Subject: [PATCH 11/42] feat: first pass at toolsets --- .../durable_exec/hatchet/_agent.py | 42 +++----- .../durable_exec/hatchet/_function_toolset.py | 95 +++++++++++++++++++ 
.../durable_exec/hatchet/_mcp_server.py | 13 ++- .../durable_exec/hatchet/_toolset.py | 74 +++++++++++++++ 4 files changed, 194 insertions(+), 30 deletions(-) create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 6fe888a17f..db746a09c9 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -92,23 +92,14 @@ def __init__( hatchet_agent_name = self._name def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: - # Replace MCPServer with HatchetMCPServer - try: - from pydantic_ai.mcp import MCPServer - - from ._mcp_server import HatchetMCPServer - except ImportError: - pass - else: - if isinstance(toolset, MCPServer): - return HatchetMCPServer[AgentDepsT]( - wrapped=toolset, - hatchet=hatchet, - task_name_prefix=hatchet_agent_name, - task_config=mcp_task_config or TaskConfig(), - ) - - return toolset + from ._toolset import hatchetize_toolset + + return hatchetize_toolset( + toolset, + hatchet=hatchet, + task_name_prefix=hatchet_agent_name, + task_config=mcp_task_config or TaskConfig(), + ) self._toolsets = [toolset.visit_and_replace(hatchetify_toolset) for toolset in wrapped.toolsets] @@ -162,21 +153,16 @@ def _hatchet_overrides(self) -> Iterator[None]: @property def workflows(self) -> Sequence[BaseWorkflow[Any]]: - workflows = [ + workflows: list[BaseWorkflow[Any]] = [ self.hatchet_wrapped_run_workflow, - self._model._hatchet_wrapped_request_task, + self._model.hatchet_wrapped_request_task, ] for toolset in self._toolsets: - from ._mcp_server import HatchetMCPServer - - if isinstance(toolset, HatchetMCPServer): - workflows.extend( - [ - toolset.hatchet_wrapped_get_tools_task, - 
toolset.hatchet_wrapped_call_tool_task, - ] - ) + from ._toolset import HatchetWrapperToolset + + if isinstance(toolset, HatchetWrapperToolset): + workflows.extend(toolset.hatchet_tasks) return workflows diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py new file mode 100644 index 0000000000..7c8961947d --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import Any, Generic + +from hatchet_sdk import Context, Hatchet +from hatchet_sdk.runnables.workflow import Standalone +from pydantic import BaseModel, ConfigDict + +from pydantic_ai.exceptions import UserError +from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.toolsets import FunctionToolset, ToolsetTool + +from ._toolset import HatchetWrapperToolset +from ._utils import TaskConfig + + +class CallToolInput(BaseModel, Generic[AgentDepsT]): + model_config = ConfigDict(arbitrary_types_allowed=True) + + name: str + tool_args: dict[str, Any] + ctx: RunContext[AgentDepsT] + + +class HatchetFunctionToolset(HatchetWrapperToolset[AgentDepsT]): + """A wrapper for FunctionToolset that integrates with Hatchet, turning tool calls into Hatchet tasks.""" + + def __init__( + self, wrapped: FunctionToolset[AgentDepsT], *, hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig + ): + super().__init__(wrapped) + self._task_config = task_config + self._task_name_prefix = task_name_prefix + self._hatchet = hatchet + self._tool_tasks: dict[str, Standalone[Any, Any]] = {} + + for tool_name, tool in wrapped.tools.items(): + task_name = f'{task_name_prefix}__function_tool__{tool_name}' + + def make_tool_task(current_tool_name: str, current_tool: Any): + @hatchet.task( + name=task_name, + description=self._task_config.description, + input_validator=CallToolInput[AgentDepsT], + 
version=self._task_config.version, + sticky=self._task_config.sticky, + default_priority=self._task_config.default_priority, + concurrency=self._task_config.concurrency, + schedule_timeout=self._task_config.schedule_timeout, + execution_timeout=self._task_config.execution_timeout, + retries=self._task_config.retries, + rate_limits=self._task_config.rate_limits, + desired_worker_labels=self._task_config.desired_worker_labels, + backoff_factor=self._task_config.backoff_factor, + backoff_max_seconds=self._task_config.backoff_max_seconds, + default_filters=self._task_config.default_filters, + ) + async def tool_task( + input: CallToolInput[AgentDepsT], + _ctx: Context, + ) -> Any: + return await super(HatchetFunctionToolset, self).call_tool( + current_tool_name, input.tool_args, input.ctx, current_tool + ) + + return tool_task + + self._tool_tasks[tool_name] = make_tool_task(tool_name, tool) + + @property + def hatchet_tasks(self) -> list[Standalone[Any, Any]]: + """Return the list of Hatchet tasks for this toolset.""" + return list(self._tool_tasks.values()) + + async def call_tool( + self, + name: str, + tool_args: dict[str, Any], + ctx: RunContext[AgentDepsT], + tool: ToolsetTool[AgentDepsT], + ) -> Any: + if name not in self._tool_tasks: + raise UserError( + f'Tool {name!r} not found in toolset {self.id!r}. ' + 'Removing or renaming tools during an agent run is not supported with Hatchet.' 
+ ) + + tool_task = self._tool_tasks[name] + return await tool_task.aio_run( + CallToolInput( + name=name, + tool_args=tool_args, + ctx=ctx, + ) + ) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index ead577de9e..8692a1efe0 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -4,12 +4,13 @@ from typing import TYPE_CHECKING, Any, Generic, TypeVar from hatchet_sdk import DurableContext, Hatchet +from hatchet_sdk.runnables.workflow import Standalone from pydantic import BaseModel, ConfigDict from pydantic_ai.tools import AgentDepsT, RunContext from pydantic_ai.toolsets.abstract import ToolsetTool -from pydantic_ai.toolsets.wrapper import WrapperToolset +from ._toolset import HatchetWrapperToolset from ._utils import TaskConfig if TYPE_CHECKING: @@ -39,7 +40,7 @@ class CallToolOutput(BaseModel): result: ToolResult -class HatchetMCPServer(WrapperToolset[AgentDepsT], ABC): +class HatchetMCPServer(HatchetWrapperToolset[AgentDepsT], ABC): """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" def __init__(self, wrapped: MCPServer, *, hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig): @@ -102,6 +103,14 @@ async def wrapped_call_tool_task( self.hatchet_wrapped_call_tool_task = wrapped_call_tool_task + @property + def hatchet_tasks(self) -> list[Standalone[Any, Any]]: + """Return the list of Hatchet tasks for this toolset.""" + return [ + self.hatchet_wrapped_get_tools_task, + self.hatchet_wrapped_call_tool_task, + ] + async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: return await self.hatchet_wrapped_get_tools_task.aio_run(GetToolsInput(ctx=ctx)) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py new file mode 100644 index 0000000000..4afa929099 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Callable +from typing import Any + +from hatchet_sdk.runnables.workflow import Standalone + +from pydantic_ai.tools import AgentDepsT +from pydantic_ai.toolsets.abstract import AbstractToolset +from pydantic_ai.toolsets.function import FunctionToolset +from pydantic_ai.toolsets.wrapper import WrapperToolset + +from ._utils import TaskConfig + + +class HatchetWrapperToolset(WrapperToolset[AgentDepsT], ABC): + @property + def id(self) -> str: + assert self.wrapped.id is not None + return self.wrapped.id + + @property + @abstractmethod + def hatchet_tasks(self) -> list[Standalone[Any, Any]]: + """Return the list of Hatchet tasks for this toolset.""" + raise NotImplementedError + + def visit_and_replace( + self, visitor: Callable[[AbstractToolset[AgentDepsT]], AbstractToolset[AgentDepsT]] + ) -> AbstractToolset[AgentDepsT]: + return self + + +def hatchetize_toolset( + toolset: AbstractToolset[AgentDepsT], + hatchet: Any, # Hatchet instance + task_name_prefix: str, + task_config: TaskConfig, +) -> AbstractToolset[AgentDepsT]: + """Hatchetize a toolset. + + Args: + toolset: The toolset to hatchetize. + hatchet: The Hatchet instance to use for creating tasks. + task_name_prefix: Prefix for Hatchet task names. + task_config: The Hatchet task config to use. 
+ """ + if isinstance(toolset, FunctionToolset): + from ._function_toolset import HatchetFunctionToolset + + return HatchetFunctionToolset( + toolset, + hatchet=hatchet, + task_name_prefix=task_name_prefix, + task_config=task_config, + ) + + try: + from pydantic_ai.mcp import MCPServer + + from ._mcp_server import HatchetMCPServer + except ImportError: + pass + else: + if isinstance(toolset, MCPServer): + return HatchetMCPServer( + toolset, + hatchet=hatchet, + task_name_prefix=task_name_prefix, + task_config=task_config, + ) + + return toolset From ee5e872ea3a9014083709e779aaecdc65e43ed27 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 11:00:27 -0400 Subject: [PATCH 12/42] feat: hatchet run context --- .../durable_exec/hatchet/_run_context.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py new file mode 100644 index 0000000000..eed5a455d8 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from typing import Any + +from pydantic_ai.exceptions import UserError +from pydantic_ai.tools import AgentDepsT, RunContext + + +class HatchetRunContext(RunContext[AgentDepsT]): + """The [`RunContext`][pydantic_ai.tools.RunContext] subclass to use to serialize and deserialize the run context for use inside a Hatchet task. + + By default, only the `deps`, `retries`, `tool_call_id`, `tool_name`, `tool_call_approved`, `retry` and `run_step` attributes will be available. + To make another attribute available, create a `HatchetRunContext` subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to [`HatchetAgent`][pydantic_ai.durable_exec.hatchet.HatchetAgent]. 
+ """ + + def __init__(self, deps: AgentDepsT, **kwargs: Any): + self.__dict__ = {**kwargs, 'deps': deps} + setattr( + self, + '__dataclass_fields__', + {name: field for name, field in RunContext.__dataclass_fields__.items() if name in self.__dict__}, + ) + + def __getattribute__(self, name: str) -> Any: + try: + return super().__getattribute__(name) + except AttributeError as e: # pragma: no cover + if name in RunContext.__dataclass_fields__: + raise UserError( + f'{self.__class__.__name__!r} object has no attribute {name!r}. ' + 'To make the attribute available, create a `HatchetRunContext` subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to `HatchetAgent`.' + ) + else: + raise e + + @classmethod + def serialize_run_context(cls, ctx: RunContext[Any]) -> dict[str, Any]: + """Serialize the run context to a `dict[str, Any]`.""" + return { + 'retries': ctx.retries, + 'tool_call_id': ctx.tool_call_id, + 'tool_name': ctx.tool_name, + 'tool_call_approved': ctx.tool_call_approved, + 'retry': ctx.retry, + 'run_step': ctx.run_step, + } + + @classmethod + def deserialize_run_context(cls, ctx: dict[str, Any], deps: AgentDepsT) -> HatchetRunContext[AgentDepsT]: + """Deserialize the run context from a `dict[str, Any]`.""" + return cls(**ctx, deps=deps) From fecdc7e9ebb23441dd8a170062b0de5849cda671 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 11:01:28 -0400 Subject: [PATCH 13/42] chore: ignore local files for testing --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore | 1 + 1 file changed, 1 insertion(+) create mode 100644 pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore new file mode 100644 index 0000000000..4083037423 --- /dev/null +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/.gitignore @@ -0,0 +1 @@ +local From 
052276f1db4d77388aba0fa4d8eb8394a58cb5fb Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 11:01:42 -0400 Subject: [PATCH 14/42] chore: bump hatchet --- uv.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/uv.lock b/uv.lock index c214c2d2b9..2c19105148 100644 --- a/uv.lock +++ b/uv.lock @@ -1686,7 +1686,7 @@ wheels = [ [[package]] name = "hatchet-sdk" version = "1.18.2" -source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#4962eeb0f7463f731a886c901432bea412f4d7fa" } +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#2bb989b6eb381670c2059f113286132342b0f46b" } dependencies = [ { name = "aiohttp" }, { name = "grpcio" }, From 6476b4096dda6f0f2d8cbb489c9d8a02ae164421 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 11:48:10 -0400 Subject: [PATCH 15/42] feat: use hatchet run context in mcp server --- .../durable_exec/hatchet/_mcp_server.py | 40 +++++++++++++++---- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 8692a1efe0..af17e0e272 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -10,6 +10,7 @@ from pydantic_ai.tools import AgentDepsT, RunContext from pydantic_ai.toolsets.abstract import ToolsetTool +from ._run_context import HatchetRunContext from ._toolset import HatchetWrapperToolset from ._utils import TaskConfig @@ -22,7 +23,8 @@ class GetToolsInput(BaseModel, Generic[AgentDepsT]): model_config = ConfigDict(arbitrary_types_allowed=True) - ctx: RunContext[AgentDepsT] + serialized_run_context: Any + deps: AgentDepsT class CallToolInput(BaseModel, Generic[AgentDepsT]): @@ -30,9 +32,11 @@ class CallToolInput(BaseModel, 
Generic[AgentDepsT]): name: str tool_args: dict[str, Any] - ctx: RunContext[AgentDepsT] tool: ToolsetTool[AgentDepsT] + serialized_run_context: Any + deps: AgentDepsT + class CallToolOutput(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) @@ -43,13 +47,23 @@ class CallToolOutput(BaseModel): class HatchetMCPServer(HatchetWrapperToolset[AgentDepsT], ABC): """A wrapper for MCPServer that integrates with Hatchet, turning call_tool and get_tools to Hatchet tasks.""" - def __init__(self, wrapped: MCPServer, *, hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig): + def __init__( + self, + wrapped: MCPServer, + *, + hatchet: Hatchet, + task_name_prefix: str, + task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], + ): super().__init__(wrapped) self._task_config = task_config self._task_name_prefix = task_name_prefix self._hatchet = hatchet id_suffix = f'__{wrapped.id}' if wrapped.id else '' self._name = f'{task_name_prefix}__mcp_server{id_suffix}' + self.run_context_type = run_context_type @hatchet.durable_task( name=f'{self._name}.get_tools', @@ -72,7 +86,9 @@ async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], ctx: DurableContext, ) -> dict[str, ToolsetTool[AgentDepsT]]: - return await super(HatchetMCPServer, self).get_tools(input.ctx) + run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) + + return await super(HatchetMCPServer, self).get_tools(run_context) self.hatchet_wrapped_get_tools_task = wrapped_get_tools_task @@ -97,7 +113,8 @@ async def wrapped_call_tool_task( input: CallToolInput[AgentDepsT], _ctx: DurableContext, ) -> CallToolOutput[AgentDepsT]: - result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, input.ctx, input.tool) + run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) + result 
= await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, input.tool) return CallToolOutput[AgentDepsT](result=result) @@ -112,7 +129,13 @@ def hatchet_tasks(self) -> list[Standalone[Any, Any]]: ] async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: - return await self.hatchet_wrapped_get_tools_task.aio_run(GetToolsInput(ctx=ctx)) + serialized_run_context = self.run_context_type.serialize_run_context(ctx) + return await self.hatchet_wrapped_get_tools_task.aio_run( + GetToolsInput( + serialized_run_context=serialized_run_context, + deps=ctx.deps, + ) + ) async def call_tool( self, @@ -121,12 +144,15 @@ async def call_tool( ctx: RunContext[AgentDepsT], tool: ToolsetTool[AgentDepsT], ) -> ToolResult: + serialized_run_context = self.run_context_type.serialize_run_context(ctx) + wrapped_tool_output = await self.hatchet_wrapped_call_tool_task.aio_run( CallToolInput( name=name, tool_args=tool_args, - ctx=ctx, tool=tool, + serialized_run_context=serialized_run_context, + deps=ctx.deps, ) ) From 1fb3881af18dcd0b00e825cb352dc91ff2dde342 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 11:52:04 -0400 Subject: [PATCH 16/42] feat: stricter typing on run context --- .../durable_exec/hatchet/_run_context.py | 39 ++++++++++++------- 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py index eed5a455d8..a5bf7d6e97 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py @@ -2,10 +2,21 @@ from typing import Any +from pydantic import BaseModel, Field + from pydantic_ai.exceptions import UserError from pydantic_ai.tools import AgentDepsT, RunContext +class SerializedHatchetRunContext(BaseModel): + retries: dict[str, int] = Field(default_factory=dict) + tool_call_id: 
str | None = None + tool_name: str | None = None + tool_call_approved: bool = False + retry: int = 0 + run_step: int = 0 + + class HatchetRunContext(RunContext[AgentDepsT]): """The [`RunContext`][pydantic_ai.tools.RunContext] subclass to use to serialize and deserialize the run context for use inside a Hatchet task. @@ -34,18 +45,20 @@ def __getattribute__(self, name: str) -> Any: raise e @classmethod - def serialize_run_context(cls, ctx: RunContext[Any]) -> dict[str, Any]: - """Serialize the run context to a `dict[str, Any]`.""" - return { - 'retries': ctx.retries, - 'tool_call_id': ctx.tool_call_id, - 'tool_name': ctx.tool_name, - 'tool_call_approved': ctx.tool_call_approved, - 'retry': ctx.retry, - 'run_step': ctx.run_step, - } + def serialize_run_context(cls, ctx: RunContext[Any]) -> SerializedHatchetRunContext: + """Serialize the run context to a `SerializedHatchetRunContext`.""" + return SerializedHatchetRunContext( + retries=ctx.retries, + tool_call_id=ctx.tool_call_id, + tool_name=ctx.tool_name, + tool_call_approved=ctx.tool_call_approved, + retry=ctx.retry, + run_step=ctx.run_step, + ) @classmethod - def deserialize_run_context(cls, ctx: dict[str, Any], deps: AgentDepsT) -> HatchetRunContext[AgentDepsT]: - """Deserialize the run context from a `dict[str, Any]`.""" - return cls(**ctx, deps=deps) + def deserialize_run_context( + cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT + ) -> HatchetRunContext[AgentDepsT]: + """Deserialize the run context from a `SerializedHatchetRunContext`.""" + return cls(**ctx.model_dump(), deps=deps) From e791cd5a421cc7ac76be737a06cc8a81b253cc5e Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:01:18 -0400 Subject: [PATCH 17/42] fix: pass types around --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 6 ++++++ .../pydantic_ai/durable_exec/hatchet/_mcp_server.py | 10 +++++----- .../pydantic_ai/durable_exec/hatchet/_toolset.py | 10 +++++++++- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index db746a09c9..e083cb8a72 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -26,6 +26,7 @@ from pydantic_ai.toolsets import AbstractToolset from ._model import HatchetModel +from ._run_context import HatchetRunContext from ._utils import TaskConfig @@ -56,6 +57,7 @@ def __init__( name: str | None = None, mcp_task_config: TaskConfig | None = None, model_task_config: TaskConfig | None = None, + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], ): """Wrap an agent to enable it with Hatchet durable tasks, by automatically offloading model requests, tool calls, and MCP server communication to Hatchet tasks. @@ -67,6 +69,7 @@ def __init__( name: Optional unique agent name to use in the Hatchet tasks' names. If not provided, the agent's `name` will be used. mcp_task_config: The base Hatchet task config to use for MCP server tasks. If no config is provided, use the default settings. model_task_config: The Hatchet task config to use for model request tasks. If no config is provided, use the default settings. + run_context_type: The `HatchetRunContext` (sub)class that's used to serialize and deserialize the run context. 
""" super().__init__(wrapped) @@ -90,6 +93,7 @@ def __init__( hatchet=self._hatchet, ) hatchet_agent_name = self._name + self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: from ._toolset import hatchetize_toolset @@ -99,6 +103,8 @@ def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[ hatchet=hatchet, task_name_prefix=hatchet_agent_name, task_config=mcp_task_config or TaskConfig(), + deps_type=self.deps_type, + run_context_type=run_context_type, ) self._toolsets = [toolset.visit_and_replace(hatchetify_toolset) for toolset in wrapped.toolsets] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index af17e0e272..d3651d1d82 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -10,7 +10,7 @@ from pydantic_ai.tools import AgentDepsT, RunContext from pydantic_ai.toolsets.abstract import ToolsetTool -from ._run_context import HatchetRunContext +from ._run_context import HatchetRunContext, SerializedHatchetRunContext from ._toolset import HatchetWrapperToolset from ._utils import TaskConfig @@ -23,7 +23,7 @@ class GetToolsInput(BaseModel, Generic[AgentDepsT]): model_config = ConfigDict(arbitrary_types_allowed=True) - serialized_run_context: Any + serialized_run_context: SerializedHatchetRunContext deps: AgentDepsT @@ -34,7 +34,7 @@ class CallToolInput(BaseModel, Generic[AgentDepsT]): tool_args: dict[str, Any] tool: ToolsetTool[AgentDepsT] - serialized_run_context: Any + serialized_run_context: SerializedHatchetRunContext deps: AgentDepsT @@ -63,7 +63,7 @@ def __init__( self._hatchet = hatchet id_suffix = f'__{wrapped.id}' if wrapped.id else '' self._name = f'{task_name_prefix}__mcp_server{id_suffix}' - self.run_context_type = 
run_context_type + self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type @hatchet.durable_task( name=f'{self._name}.get_tools', @@ -84,7 +84,7 @@ def __init__( ) async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], - ctx: DurableContext, + _ctx: DurableContext, ) -> dict[str, ToolsetTool[AgentDepsT]]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py index 4afa929099..051c2c9283 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py @@ -4,6 +4,7 @@ from collections.abc import Callable from typing import Any +from hatchet_sdk import Hatchet from hatchet_sdk.runnables.workflow import Standalone from pydantic_ai.tools import AgentDepsT @@ -11,6 +12,7 @@ from pydantic_ai.toolsets.function import FunctionToolset from pydantic_ai.toolsets.wrapper import WrapperToolset +from ._run_context import HatchetRunContext from ._utils import TaskConfig @@ -34,9 +36,11 @@ def visit_and_replace( def hatchetize_toolset( toolset: AbstractToolset[AgentDepsT], - hatchet: Any, # Hatchet instance + hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], ) -> AbstractToolset[AgentDepsT]: """Hatchetize a toolset. @@ -45,6 +49,8 @@ def hatchetize_toolset( hatchet: The Hatchet instance to use for creating tasks. task_name_prefix: Prefix for Hatchet task names. task_config: The Hatchet task config to use. + deps_type: The type of agent's dependencies object. It needs to be serializable using Pydantic's `TypeAdapter`. + run_context_type: The `HatchetRunContext` (sub)class that's used to serialize and deserialize the run context. 
""" if isinstance(toolset, FunctionToolset): from ._function_toolset import HatchetFunctionToolset @@ -69,6 +75,8 @@ def hatchetize_toolset( hatchet=hatchet, task_name_prefix=task_name_prefix, task_config=task_config, + deps_type=deps_type, + run_context_type=run_context_type, ) return toolset From 0626318da6544a6b2b13e52a55cc3b89874d933c Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:26:57 -0400 Subject: [PATCH 18/42] fix: make tool and tool def serializable a la temporal impl --- .../durable_exec/hatchet/_mcp_server.py | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index d3651d1d82..d6eb95552a 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -8,7 +8,10 @@ from pydantic import BaseModel, ConfigDict from pydantic_ai.tools import AgentDepsT, RunContext -from pydantic_ai.toolsets.abstract import ToolsetTool +from pydantic_ai.toolsets.abstract import ( + ToolDefinition, + ToolsetTool, +) from ._run_context import HatchetRunContext, SerializedHatchetRunContext from ._toolset import HatchetWrapperToolset @@ -32,7 +35,7 @@ class CallToolInput(BaseModel, Generic[AgentDepsT]): name: str tool_args: dict[str, Any] - tool: ToolsetTool[AgentDepsT] + tool_def: ToolDefinition serialized_run_context: SerializedHatchetRunContext deps: AgentDepsT @@ -85,10 +88,12 @@ def __init__( async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], _ctx: DurableContext, - ) -> dict[str, ToolsetTool[AgentDepsT]]: + ) -> dict[str, ToolDefinition]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) - return await super(HatchetMCPServer, self).get_tools(run_context) + tools = await super(HatchetMCPServer, self).get_tools(run_context) + + return 
{name: tool.tool_def for name, tool in tools.items()} self.hatchet_wrapped_get_tools_task = wrapped_get_tools_task @@ -114,7 +119,9 @@ async def wrapped_call_tool_task( _ctx: DurableContext, ) -> CallToolOutput[AgentDepsT]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) - result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, input.tool) + tool = self.tool_for_tool_def(input.tool_def) + + result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, tool) return CallToolOutput[AgentDepsT](result=result) @@ -128,15 +135,21 @@ def hatchet_tasks(self) -> list[Standalone[Any, Any]]: self.hatchet_wrapped_call_tool_task, ] + def tool_for_tool_def(self, tool_def: ToolDefinition) -> ToolsetTool[AgentDepsT]: + assert isinstance(self.wrapped, MCPServer) + return self.wrapped.tool_for_tool_def(tool_def) + async def get_tools(self, ctx: RunContext[AgentDepsT]) -> dict[str, ToolsetTool[AgentDepsT]]: serialized_run_context = self.run_context_type.serialize_run_context(ctx) - return await self.hatchet_wrapped_get_tools_task.aio_run( + tool_defs = await self.hatchet_wrapped_get_tools_task.aio_run( GetToolsInput( serialized_run_context=serialized_run_context, deps=ctx.deps, ) ) + return {name: self.tool_for_tool_def(tool_def) for name, tool_def in tool_defs.items()} + async def call_tool( self, name: str, @@ -150,7 +163,7 @@ async def call_tool( CallToolInput( name=name, tool_args=tool_args, - tool=tool, + tool_def=tool.tool_def, serialized_run_context=serialized_run_context, deps=ctx.deps, ) From c89bc10be407027feb40bb708e0a93d05de99135 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:27:56 -0400 Subject: [PATCH 19/42] fix: comment --- .../pydantic_ai/durable_exec/hatchet/_mcp_server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index d6eb95552a..6d5c9b90e9 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -91,6 +91,8 @@ async def wrapped_get_tools_task( ) -> dict[str, ToolDefinition]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) + # ToolsetTool is not serializable as it holds a SchemaValidator (which is also the same for every MCP tool so unnecessary to pass along the wire every time), + # so we just return the ToolDefinitions and wrap them in ToolsetTool outside of the task. tools = await super(HatchetMCPServer, self).get_tools(run_context) return {name: tool.tool_def for name, tool in tools.items()} From 9b7a2c34a4ecc6048c8e1d749e00a28b3bcd5bfc Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:41:19 -0400 Subject: [PATCH 20/42] feat: more work on making the hatchet function toolset impl more similar to temporal --- .../durable_exec/hatchet/_function_toolset.py | 37 +++++++++++-------- .../durable_exec/hatchet/_mcp_server.py | 8 ++-- 2 files changed, 25 insertions(+), 20 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py index 7c8961947d..dbe83bc8e3 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -1,38 +1,37 @@ -from __future__ import annotations - -from typing import Any, Generic +from typing import Any from hatchet_sdk import Context, Hatchet from hatchet_sdk.runnables.workflow import Standalone -from pydantic import BaseModel, ConfigDict from pydantic_ai.exceptions import UserError from pydantic_ai.tools import AgentDepsT, RunContext from pydantic_ai.toolsets import FunctionToolset, ToolsetTool +from
._mcp_server import CallToolInput +from ._run_context import HatchetRunContext from ._toolset import HatchetWrapperToolset from ._utils import TaskConfig -class CallToolInput(BaseModel, Generic[AgentDepsT]): - model_config = ConfigDict(arbitrary_types_allowed=True) - - name: str - tool_args: dict[str, Any] - ctx: RunContext[AgentDepsT] - - class HatchetFunctionToolset(HatchetWrapperToolset[AgentDepsT]): """A wrapper for FunctionToolset that integrates with Hatchet, turning tool calls into Hatchet tasks.""" def __init__( - self, wrapped: FunctionToolset[AgentDepsT], *, hatchet: Hatchet, task_name_prefix: str, task_config: TaskConfig + self, + wrapped: FunctionToolset[AgentDepsT], + *, + hatchet: Hatchet, + task_name_prefix: str, + task_config: TaskConfig, + deps_type: type[AgentDepsT], + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], ): super().__init__(wrapped) self._task_config = task_config self._task_name_prefix = task_name_prefix self._hatchet = hatchet self._tool_tasks: dict[str, Standalone[Any, Any]] = {} + self.run_context_type = run_context_type for tool_name, tool in wrapped.tools.items(): task_name = f'{task_name_prefix}__function_tool__{tool_name}' @@ -59,8 +58,12 @@ async def tool_task( input: CallToolInput[AgentDepsT], _ctx: Context, ) -> Any: + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, deps=input.deps + ) + return await super(HatchetFunctionToolset, self).call_tool( - current_tool_name, input.tool_args, input.ctx, current_tool + current_tool_name, input.tool_args, run_context, current_tool ) return tool_task @@ -86,10 +89,14 @@ async def call_tool( ) tool_task = self._tool_tasks[name] + serialized_run_context = self.run_context_type.serialize_run_context(ctx) + return await tool_task.aio_run( CallToolInput( name=name, tool_args=tool_args, - ctx=ctx, + tool_def=tool.tool_def, + serialized_run_context=serialized_run_context, + deps=ctx.deps, ) ) diff --git 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 6d5c9b90e9..5d03862840 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from abc import ABC from typing import TYPE_CHECKING, Any, Generic, TypeVar @@ -44,7 +42,7 @@ class CallToolInput(BaseModel, Generic[AgentDepsT]): class CallToolOutput(BaseModel): model_config = ConfigDict(arbitrary_types_allowed=True) - result: ToolResult + result: 'ToolResult' class HatchetMCPServer(HatchetWrapperToolset[AgentDepsT], ABC): @@ -52,7 +50,7 @@ class HatchetMCPServer(HatchetWrapperToolset[AgentDepsT], ABC): def __init__( self, - wrapped: MCPServer, + wrapped: 'MCPServer', *, hatchet: Hatchet, task_name_prefix: str, @@ -158,7 +156,7 @@ async def call_tool( tool_args: dict[str, Any], ctx: RunContext[AgentDepsT], tool: ToolsetTool[AgentDepsT], - ) -> ToolResult: + ) -> 'ToolResult': serialized_run_context = self.run_context_type.serialize_run_context(ctx) wrapped_tool_output = await self.hatchet_wrapped_call_tool_task.aio_run( From d76c42cca292e79b84a89fd0192588a171fd37af Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:54:40 -0400 Subject: [PATCH 21/42] fix: improve typing on tasks --- .../durable_exec/hatchet/_function_toolset.py | 30 +++++++++++++------ .../durable_exec/hatchet/_toolset.py | 2 ++ 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py index dbe83bc8e3..ede2ffafbd 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -2,6 +2,7 @@ from hatchet_sdk import Context, Hatchet from 
hatchet_sdk.runnables.workflow import Standalone +from pydantic import BaseModel, ConfigDict from pydantic_ai.exceptions import UserError from pydantic_ai.tools import AgentDepsT, RunContext @@ -13,6 +14,12 @@ from ._utils import TaskConfig +class ToolOutput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + result: Any + + class HatchetFunctionToolset(HatchetWrapperToolset[AgentDepsT]): """A wrapper for FunctionToolset that integrates with Hatchet, turning tool calls into Hatchet tasks.""" @@ -30,13 +37,13 @@ def __init__( self._task_config = task_config self._task_name_prefix = task_name_prefix self._hatchet = hatchet - self._tool_tasks: dict[str, Standalone[Any, Any]] = {} + self._tool_tasks: dict[str, Standalone[CallToolInput[AgentDepsT], ToolOutput]] = {} self.run_context_type = run_context_type - for tool_name, tool in wrapped.tools.items(): + for tool_name in wrapped.tools.keys(): task_name = f'{task_name_prefix}__function_tool__{tool_name}' - def make_tool_task(current_tool_name: str, current_tool: Any): + def make_tool_task(current_tool_name: str): @hatchet.task( name=task_name, description=self._task_config.description, @@ -57,18 +64,21 @@ def make_tool_task(current_tool_name: str, current_tool: Any): async def tool_task( input: CallToolInput[AgentDepsT], _ctx: Context, - ) -> Any: + ) -> ToolOutput: run_context = self.run_context_type.deserialize_run_context( input.serialized_run_context, deps=input.deps ) + tool = (await wrapped.get_tools(run_context))[current_tool_name] - return await super(HatchetFunctionToolset, self).call_tool( - current_tool_name, input.tool_args, run_context, current_tool + result = await super(HatchetFunctionToolset, self).call_tool( + current_tool_name, input.tool_args, run_context, tool ) + return ToolOutput(result=result) + return tool_task - self._tool_tasks[tool_name] = make_tool_task(tool_name, tool) + self._tool_tasks[tool_name] = make_tool_task(tool_name) @property def hatchet_tasks(self) -> 
list[Standalone[Any, Any]]: @@ -88,10 +98,10 @@ async def call_tool( 'Removing or renaming tools during an agent run is not supported with Hatchet.' ) - tool_task = self._tool_tasks[name] + tool_task: Standalone[CallToolInput[AgentDepsT], ToolOutput] = self._tool_tasks[name] serialized_run_context = self.run_context_type.serialize_run_context(ctx) - return await tool_task.aio_run( + output = await tool_task.aio_run( CallToolInput( name=name, tool_args=tool_args, @@ -100,3 +110,5 @@ async def call_tool( deps=ctx.deps, ) ) + + return output.result diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py index 051c2c9283..8b0c8d5e07 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_toolset.py @@ -60,6 +60,8 @@ def hatchetize_toolset( hatchet=hatchet, task_name_prefix=task_name_prefix, task_config=task_config, + deps_type=deps_type, + run_context_type=run_context_type, ) try: From 722f356b99ce17d1a33fd4b8c1c897190ec72295 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:58:11 -0400 Subject: [PATCH 22/42] fix: use tasks for everything except the agent itself --- .../pydantic_ai/durable_exec/hatchet/_mcp_server.py | 10 +++++----- .../pydantic_ai/durable_exec/hatchet/_model.py | 6 +++--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 5d03862840..9306e8a6b4 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -1,7 +1,7 @@ from abc import ABC from typing import TYPE_CHECKING, Any, Generic, TypeVar -from hatchet_sdk import DurableContext, Hatchet +from hatchet_sdk import Context, Hatchet from hatchet_sdk.runnables.workflow import Standalone from 
pydantic import BaseModel, ConfigDict @@ -66,7 +66,7 @@ def __init__( self._name = f'{task_name_prefix}__mcp_server{id_suffix}' self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type - @hatchet.durable_task( + @hatchet.task( name=f'{self._name}.get_tools', description=self._task_config.description, input_validator=GetToolsInput[AgentDepsT], @@ -85,7 +85,7 @@ def __init__( ) async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], - _ctx: DurableContext, + _ctx: Context, ) -> dict[str, ToolDefinition]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) @@ -97,7 +97,7 @@ async def wrapped_get_tools_task( self.hatchet_wrapped_get_tools_task = wrapped_get_tools_task - @hatchet.durable_task( + @hatchet.task( name=f'{self._name}.call_tool', description=self._task_config.description, input_validator=CallToolInput[AgentDepsT], @@ -116,7 +116,7 @@ async def wrapped_get_tools_task( ) async def wrapped_call_tool_task( input: CallToolInput[AgentDepsT], - _ctx: DurableContext, + _ctx: Context, ) -> CallToolOutput[AgentDepsT]: run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) tool = self.tool_for_tool_def(input.tool_def) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index fceef61594..14248fdbc9 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,6 +1,6 @@ from __future__ import annotations -from hatchet_sdk import DurableContext, Hatchet +from hatchet_sdk import Context, Hatchet from pydantic import BaseModel, ConfigDict from pydantic_ai.messages import ( @@ -31,7 +31,7 @@ def __init__(self, model: Model, *, task_name_prefix: str, task_config: TaskConf self.hatchet = hatchet self._task_name_prefix = task_name_prefix - @hatchet.durable_task( + 
@hatchet.task( name=f'{self._task_name_prefix}__model.request', description=self.task_config.description, input_validator=ModelInput, @@ -50,7 +50,7 @@ def __init__(self, model: Model, *, task_name_prefix: str, task_config: TaskConf ) async def wrapped_request_task( input: ModelInput, - _ctx: DurableContext, + _ctx: Context, ) -> ModelResponse: return await super(HatchetModel, self).request( input.messages, input.model_settings, input.model_request_parameters From f09374928e4dbd6f11a9b94b4de9151ee6249018 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 12:58:52 -0400 Subject: [PATCH 23/42] fix: tool naming --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 14248fdbc9..18696f1f62 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -32,7 +32,7 @@ def __init__(self, model: Model, *, task_name_prefix: str, task_config: TaskConf self._task_name_prefix = task_name_prefix @hatchet.task( - name=f'{self._task_name_prefix}__model.request', + name=f'{self._task_name_prefix}__model__request', description=self.task_config.description, input_validator=ModelInput, version=self.task_config.version, From 10063c3bb705a388ced04e70bec5c692dd36fc49 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 18 Sep 2025 13:10:19 -0400 Subject: [PATCH 24/42] feat: add hatchet metadata on agent run --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index e083cb8a72..0f0ce698b5 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -3,8 +3,9 @@ from collections.abc import Iterator, Sequence from contextlib import contextmanager from typing import Any, Generic, overload +from uuid import uuid4 -from hatchet_sdk import DurableContext, Hatchet +from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions from hatchet_sdk.runnables.workflow import BaseWorkflow from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never @@ -225,6 +226,8 @@ async def run( event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, **_deprecated_kwargs: Never, ) -> AgentRunResult[Any]: + agent_run_id = uuid4() + """Run the agent with a user prompt in async mode.""" result = await self.hatchet_wrapped_run_workflow.aio_run( RunAgentInput[RunOutputDataT, AgentDepsT]( @@ -241,7 +244,13 @@ async def run( toolsets=toolsets, event_stream_handler=event_stream_handler, deprecated_kwargs=_deprecated_kwargs, - ) + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(agent_run_id), + } + ), ) if isinstance(result, dict): From fad04c87dc57762a8e2a530eb9f60e82483aa8db Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Fri, 19 Sep 2025 08:40:30 -0400 Subject: [PATCH 25/42] feat: add run_sync method to the hatchet agent --- .../durable_exec/hatchet/_agent.py | 87 ++++++++++++++++++- 1 file changed, 86 insertions(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 0f0ce698b5..3fd1fe02d1 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -226,9 +226,9 @@ async def run( event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, **_deprecated_kwargs: Never, ) -> AgentRunResult[Any]: + """Run the agent with a user prompt in async mode.""" 
agent_run_id = uuid4() - """Run the agent with a user prompt in async mode.""" result = await self.hatchet_wrapped_run_workflow.aio_run( RunAgentInput[RunOutputDataT, AgentDepsT]( user_prompt=user_prompt, @@ -257,3 +257,88 @@ async def run( return TypeAdapter(AgentRunResult[Any]).validate_python(result) return result + + @overload + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[OutputDataT]: ... + + @overload + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AgentRunResult[RunOutputDataT]: ... 
+ + def run_sync( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AgentRunResult[Any]: + """Run the agent with a user prompt in sync mode.""" + agent_run_id = uuid4() + + result = self.hatchet_wrapped_run_workflow.run( + RunAgentInput[RunOutputDataT, AgentDepsT]( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + deprecated_kwargs=_deprecated_kwargs, + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(agent_run_id), + } + ), + ) + + if isinstance(result, dict): + return TypeAdapter(AgentRunResult[Any]).validate_python(result) + + return result From aff17c00dae13f0825004cb8dbb9bf5799ae50be Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Fri, 19 Sep 2025 09:02:52 -0400 Subject: [PATCH 26/42] feat: add run_stream and iter methods --- .../durable_exec/hatchet/_agent.py | 289 +++++++++++++++++- 1 file changed, 279 insertions(+), 10 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 
3fd1fe02d1..11425635ab 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -1,7 +1,7 @@ from __future__ import annotations -from collections.abc import Iterator, Sequence -from contextlib import contextmanager +from collections.abc import AsyncIterator, Iterator, Sequence +from contextlib import AbstractAsyncContextManager, asynccontextmanager, contextmanager from typing import Any, Generic, overload from uuid import uuid4 @@ -10,19 +10,18 @@ from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never -from pydantic_ai import ( - messages as _messages, - models, - usage as _usage, -) -from pydantic_ai.agent import AbstractAgent, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent +from pydantic_ai import _utils, messages as _messages, models, usage as _usage +from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent from pydantic_ai.exceptions import UserError from pydantic_ai.models import Model from pydantic_ai.output import OutputDataT, OutputSpec +from pydantic_ai.result import StreamedRunResult from pydantic_ai.settings import ModelSettings from pydantic_ai.tools import ( AgentDepsT, DeferredToolResults, + Tool, + ToolFuncEither, ) from pydantic_ai.toolsets import AbstractToolset @@ -96,7 +95,7 @@ def __init__( hatchet_agent_name = self._name self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type - def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: + def hatchetize_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: from ._toolset import hatchetize_toolset return hatchetize_toolset( @@ -108,7 +107,7 @@ def hatchetify_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[ run_context_type=run_context_type, ) - self._toolsets = 
[toolset.visit_and_replace(hatchetify_toolset) for toolset in wrapped.toolsets] + self._toolsets = [toolset.visit_and_replace(hatchetize_toolset) for toolset in wrapped.toolsets] @hatchet.durable_task(name=f'{self._name}.run', input_validator=RunAgentInput[Any, Any]) async def wrapped_run_workflow( @@ -342,3 +341,273 @@ def run_sync( return TypeAdapter(AgentRunResult[Any]).validate_python(result) return result + + @overload + def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AbstractAsyncContextManager[StreamedRunResult[AgentDepsT, OutputDataT]]: ... + + @overload + def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + ) -> AbstractAsyncContextManager[StreamedRunResult[AgentDepsT, RunOutputDataT]]: ... 
+ + @asynccontextmanager + async def run_stream( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + **_deprecated_kwargs: Never, + ) -> AsyncIterator[StreamedRunResult[AgentDepsT, Any]]: + """Run the agent with a user prompt in async mode, returning a streamed response. + + Example: + ```python + from pydantic_ai import Agent + + agent = Agent('openai:gpt-4o') + + async def main(): + async with agent.run_stream('What is the capital of the UK?') as response: + print(await response.get_output()) + #> The capital of the UK is London. + ``` + + Args: + user_prompt: User input to start/continue the conversation. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. + message_history: History of the conversation so far. + deferred_tool_results: Optional results for deferred tool calls in the message history. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. + usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. 
+ infer_name: Whether to try to infer the agent name from the call frame if it's not set. + toolsets: Optional additional toolsets for this run. + event_stream_handler: Optional event stream handler to use for this run. It will receive all the events up until the final result is found, which you can then read or stream from inside the context manager. + + Returns: + The result of the run. + """ + async with super().run_stream( + user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + **_deprecated_kwargs, + ) as result: + yield result + + @overload + def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AbstractAsyncContextManager[AgentRun[AgentDepsT, OutputDataT]]: ... 
+ + @overload + def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT], + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AbstractAsyncContextManager[AgentRun[AgentDepsT, RunOutputDataT]]: ... + + @asynccontextmanager + async def iter( + self, + user_prompt: str | Sequence[_messages.UserContent] | None = None, + *, + output_type: OutputSpec[RunOutputDataT] | None = None, + message_history: list[_messages.ModelMessage] | None = None, + deferred_tool_results: DeferredToolResults | None = None, + model: models.Model | models.KnownModelName | str | None = None, + deps: AgentDepsT = None, + model_settings: ModelSettings | None = None, + usage_limits: _usage.UsageLimits | None = None, + usage: _usage.RunUsage | None = None, + infer_name: bool = True, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None, + **_deprecated_kwargs: Never, + ) -> AsyncIterator[AgentRun[AgentDepsT, Any]]: + """A contextmanager which can be used to iterate over the agent graph's nodes as they are executed. + + This method builds an internal agent graph (using system prompts, tools and output schemas) and then returns an + `AgentRun` object. The `AgentRun` can be used to async-iterate over the nodes of the graph as they are + executed. This is the API to use if you want to consume the outputs coming from each LLM model response, or the + stream of events coming from the execution of tools. 
+ + The `AgentRun` also provides methods to access the full message history, new messages, and usage statistics, + and the final result of the run once it has completed. + + For more details, see the documentation of `AgentRun`. + + Example: + ```python + from pydantic_ai import Agent + + agent = Agent('openai:gpt-4o') + + async def main(): + nodes = [] + async with agent.iter('What is the capital of France?') as agent_run: + async for node in agent_run: + nodes.append(node) + print(nodes) + ''' + [ + UserPromptNode( + user_prompt='What is the capital of France?', + instructions_functions=[], + system_prompts=(), + system_prompt_functions=[], + system_prompt_dynamic_functions={}, + ), + ModelRequestNode( + request=ModelRequest( + parts=[ + UserPromptPart( + content='What is the capital of France?', + timestamp=datetime.datetime(...), + ) + ] + ) + ), + CallToolsNode( + model_response=ModelResponse( + parts=[TextPart(content='The capital of France is Paris.')], + usage=RequestUsage(input_tokens=56, output_tokens=7), + model_name='gpt-4o', + timestamp=datetime.datetime(...), + ) + ), + End(data=FinalResult(output='The capital of France is Paris.')), + ] + ''' + print(agent_run.result.output) + #> The capital of France is Paris. + ``` + + Args: + user_prompt: User input to start/continue the conversation. + output_type: Custom output type to use for this run, `output_type` may only be used if the agent has no + output validators since output validators would expect an argument that matches the agent's output type. + message_history: History of the conversation so far. + deferred_tool_results: Optional results for deferred tool calls in the message history. + model: Optional model to use for this run, required if `model` was not set when creating the agent. + deps: Optional dependencies to use for this run. + model_settings: Optional settings to use for this model's request. + usage_limits: Optional limits on model request count or token usage. 
+ usage: Optional usage to start with, useful for resuming a conversation or agents used in tools. + infer_name: Whether to try to infer the agent name from the call frame if it's not set. + toolsets: Optional additional toolsets for this run. + + Returns: + The result of the run. + """ + async with super().iter( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + **_deprecated_kwargs, + ) as run: + yield run + + @contextmanager + def override( + self, + *, + deps: AgentDepsT | _utils.Unset = _utils.UNSET, + model: models.Model | models.KnownModelName | str | _utils.Unset = _utils.UNSET, + toolsets: Sequence[AbstractToolset[AgentDepsT]] | _utils.Unset = _utils.UNSET, + tools: Sequence[Tool[AgentDepsT] | ToolFuncEither[AgentDepsT, ...]] | _utils.Unset = _utils.UNSET, + ) -> Iterator[None]: + """Context manager to temporarily override agent dependencies, model, toolsets, or tools. + + This is particularly useful when testing. + You can find an example of this [here](../testing.md#overriding-model-via-pytest-fixtures). + + Args: + deps: The dependencies to use instead of the dependencies passed to the agent run. + model: The model to use instead of the model passed to the agent run. + toolsets: The toolsets to use instead of the toolsets passed to the agent constructor and agent run. + tools: The tools to use instead of the tools registered with the agent. 
+ """ + with super().override(deps=deps, model=model, toolsets=toolsets, tools=tools): + yield From 756da3375e8615df836e6e07140fff23b0c9dab8 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Fri, 19 Sep 2025 09:53:27 -0400 Subject: [PATCH 27/42] fix: return list of workflows so we don't need a cast --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 11425635ab..cc5af9bafb 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -158,7 +158,7 @@ def _hatchet_overrides(self) -> Iterator[None]: yield @property - def workflows(self) -> Sequence[BaseWorkflow[Any]]: + def workflows(self) -> list[BaseWorkflow[Any]]: workflows: list[BaseWorkflow[Any]] = [ self.hatchet_wrapped_run_workflow, self._model.hatchet_wrapped_request_task, From 29e79cd007e51c70dac07047aa5b7e8c15d1c019 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 11:40:42 -0400 Subject: [PATCH 28/42] fix: add hacks around run_stream from inside of a task --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index cc5af9bafb..dc298e1764 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -6,6 +6,7 @@ from uuid import uuid4 from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions +from hatchet_sdk.runnables.contextvars import ctx_workflow_run_id from hatchet_sdk.runnables.workflow import BaseWorkflow from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never @@ -428,6 +429,14 @@ async def 
main(): Returns: The result of the run. """ + run_id = ctx_workflow_run_id.get() + if run_id: + raise UserError( + '`agent.run_stream()` cannot currently be used inside a Hatchet workflow. ' + 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. ' + 'Please file an issue if this is not sufficient for your use case.' + ) + async with super().run_stream( user_prompt, output_type=output_type, @@ -573,6 +582,14 @@ async def main(): Returns: The result of the run. """ + run_id = ctx_workflow_run_id.get() + if run_id: + raise UserError( + '`agent.iter()` cannot currently be used inside a Hatchet workflow. ' + 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. ' + 'Please file an issue if this is not sufficient for your use case.' + ) + async with super().iter( user_prompt=user_prompt, output_type=output_type, From e0124159e92de99a56a245f622271c9d4ef8f75d Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 11:55:11 -0400 Subject: [PATCH 29/42] fix: start implementing event stream handler for agent properly --- .../durable_exec/hatchet/_agent.py | 14 ++++--- .../durable_exec/hatchet/_model.py | 38 ++++++++++++++++++- uv.lock | 16 ++++---- 3 files changed, 54 insertions(+), 14 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index dc298e1764..51f15edf65 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -6,7 +6,6 @@ from uuid import uuid4 from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions -from hatchet_sdk.runnables.contextvars import ctx_workflow_run_id from hatchet_sdk.runnables.workflow import BaseWorkflow from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never @@ -56,6 +55,7 @@ def __init__( hatchet: Hatchet, *, name: str | None = None, + event_stream_handler: 
EventStreamHandler[AgentDepsT] | None = None, mcp_task_config: TaskConfig | None = None, model_task_config: TaskConfig | None = None, run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], @@ -76,6 +76,7 @@ def __init__( self._name = name or wrapped.name self._hatchet = hatchet + self._event_stream_handler = event_stream_handler if not self._name: raise UserError( @@ -92,6 +93,7 @@ def __init__( task_name_prefix=self._name, task_config=model_task_config or TaskConfig(), hatchet=self._hatchet, + event_stream_handler=self.event_stream_handler, ) hatchet_agent_name = self._name self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type @@ -148,6 +150,10 @@ def name(self, value: str | None) -> None: # pragma: no cover def model(self) -> Model: return self._model + @property + def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: + return self.event_stream_handler or super().event_stream_handler + @property def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: with self._hatchet_overrides(): @@ -429,8 +435,7 @@ async def main(): Returns: The result of the run. """ - run_id = ctx_workflow_run_id.get() - if run_id: + if self._hatchet.is_in_task_run: raise UserError( '`agent.run_stream()` cannot currently be used inside a Hatchet workflow. ' 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. ' @@ -582,8 +587,7 @@ async def main(): Returns: The result of the run. """ - run_id = ctx_workflow_run_id.get() - if run_id: + if self._hatchet.is_in_task_run: raise UserError( '`agent.iter()` cannot currently be used inside a Hatchet workflow. ' 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. 
' diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 18696f1f62..170a2458d1 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,5 +1,8 @@ from __future__ import annotations +from contextlib import asynccontextmanager +from typing import Any + from hatchet_sdk import Context, Hatchet from pydantic import BaseModel, ConfigDict @@ -10,6 +13,7 @@ from pydantic_ai.models import Model, ModelRequestParameters from pydantic_ai.models.wrapper import WrapperModel from pydantic_ai.settings import ModelSettings +from pydantic_ai.tools import RunContext from ._utils import TaskConfig @@ -25,11 +29,20 @@ class ModelInput(BaseModel): class HatchetModel(WrapperModel): """A wrapper for Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" - def __init__(self, model: Model, *, task_name_prefix: str, task_config: TaskConfig, hatchet: Hatchet): + def __init__( + self, + model: Model, + *, + task_name_prefix: str, + task_config: TaskConfig, + hatchet: Hatchet, + event_stream_handler: Any = None, + ): super().__init__(model) self.task_config = task_config self.hatchet = hatchet self._task_name_prefix = task_name_prefix + self.event_stream_handler = event_stream_handler @hatchet.task( name=f'{self._task_name_prefix}__model__request', @@ -71,3 +84,26 @@ async def request( model_request_parameters=model_request_parameters, ) ) + + @asynccontextmanager + async def request_stream( + self, + messages: list[ModelMessage], + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + run_context: RunContext[Any] | None = None, + ): + if self.hatchet.is_in_task_run: + async with super().request_stream( + messages, model_settings, model_request_parameters, run_context + ) as streamed_response: + if self.event_stream_handler is not None and 
run_context is not None: + await self.event_stream_handler(run_context, streamed_response) + async for _ in streamed_response: + pass + yield streamed_response + else: + async with super().request_stream( + messages, model_settings, model_request_parameters, run_context + ) as streamed_response: + yield streamed_response diff --git a/uv.lock b/uv.lock index 2c19105148..37b9df4ede 100644 --- a/uv.lock +++ b/uv.lock @@ -1685,8 +1685,8 @@ wheels = [ [[package]] name = "hatchet-sdk" -version = "1.18.2" -source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#2bb989b6eb381670c2059f113286132342b0f46b" } +version = "1.19.1" +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#6975357268b740219309633352d85f377782179e" } dependencies = [ { name = "aiohttp" }, { name = "grpcio" }, @@ -3166,11 +3166,11 @@ wheels = [ [[package]] name = "prometheus-client" -version = "0.22.1" +version = "0.21.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/cf/40dde0a2be27cc1eb41e333d1a674a74ce8b8b0457269cc640fd42b07cf7/prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28", size = 69746, upload-time = "2025-06-02T14:29:01.152Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/14/7d0f567991f3a9af8d1cd4f619040c93b68f09a02b6d0b6ab1b2d1ded5fe/prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb", size = 78551, upload-time = "2024-12-03T14:59:12.164Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/ae/ec06af4fe3ee72d16973474f122541746196aaa16cea6f66d18b963c6177/prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094", size = 58694, upload-time = "2025-06-02T14:29:00.068Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/c2/ab7d37426c179ceb9aeb109a85cda8948bb269b7561a0be870cc656eefe4/prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301", size = 54682, upload-time = "2024-12-03T14:59:10.935Z" }, ] [[package]] @@ -5002,11 +5002,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.3.0" +version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]] From 251955486f7128cdaf178a56343f691634b42b3f Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 11:55:52 -0400 Subject: [PATCH 30/42] fix: recursion --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py 
b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 51f15edf65..4fa221ccc8 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -152,7 +152,7 @@ def model(self) -> Model: @property def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: - return self.event_stream_handler or super().event_stream_handler + return self._event_stream_handler or super().event_stream_handler @property def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: From 68bb231320a1c7e8a1db2ff779d82c04d3747052 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 13:05:23 -0400 Subject: [PATCH 31/42] feat: streaming impl, part i --- .../durable_exec/hatchet/_agent.py | 91 +++++++++++++++---- .../durable_exec/hatchet/_model.py | 79 +++++++++++++++- 2 files changed, 150 insertions(+), 20 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 4fa221ccc8..249333c5dc 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -68,6 +68,7 @@ def __init__( wrapped: The agent to wrap. hatchet: The Hatchet instance to use for creating tasks. name: Optional unique agent name to use in the Hatchet tasks' names. If not provided, the agent's `name` will be used. + event_stream_handler: Optional event stream handler to use for this agent. mcp_task_config: The base Hatchet task config to use for MCP server tasks. If no config is provided, use the default settings. model_task_config: The Hatchet task config to use for model request tasks. If no config is provided, use the default settings. run_context_type: The `HatchetRunContext` (sub)class that's used to serialize and deserialize the run context. 
@@ -75,8 +76,10 @@ def __init__( super().__init__(wrapped) self._name = name or wrapped.name - self._hatchet = hatchet self._event_stream_handler = event_stream_handler + self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type + + self._hatchet = hatchet if not self._name: raise UserError( @@ -94,9 +97,10 @@ def __init__( task_config=model_task_config or TaskConfig(), hatchet=self._hatchet, event_stream_handler=self.event_stream_handler, + deps_type=self.deps_type, + run_context_type=self.run_context_type, ) hatchet_agent_name = self._name - self.run_context_type: type[HatchetRunContext[AgentDepsT]] = run_context_type def hatchetize_toolset(toolset: AbstractToolset[AgentDepsT]) -> AbstractToolset[AgentDepsT]: from ._toolset import hatchetize_toolset @@ -136,6 +140,30 @@ async def wrapped_run_workflow( self.hatchet_wrapped_run_workflow = wrapped_run_workflow + @hatchet.durable_task(name=f'{self._name}.run_stream', input_validator=RunAgentInput[Any, Any]) + async def wrapped_run_stream_workflow( + input: RunAgentInput[RunOutputDataT, AgentDepsT], + _ctx: DurableContext, + ) -> AgentRunResult[Any]: + with self._hatchet_overrides(): + return await super(WrapperAgent, self).run( + input.user_prompt, + output_type=input.output_type, + message_history=input.message_history, + deferred_tool_results=input.deferred_tool_results, + model=input.model, + deps=input.deps, + model_settings=input.model_settings, + usage_limits=input.usage_limits, + usage=input.usage, + infer_name=input.infer_name, + toolsets=input.toolsets, + event_stream_handler=input.event_stream_handler, + **input.deprecated_kwargs, + ) + + self.hatchet_wrapped_run_stream_workflow = wrapped_run_stream_workflow + @property def name(self) -> str | None: return self._name @@ -168,7 +196,9 @@ def _hatchet_overrides(self) -> Iterator[None]: def workflows(self) -> list[BaseWorkflow[Any]]: workflows: list[BaseWorkflow[Any]] = [ self.hatchet_wrapped_run_workflow, + 
self.hatchet_wrapped_run_stream_workflow, self._model.hatchet_wrapped_request_task, + self._model.hatchet_wrapped_request_stream_task, ] for toolset in self._toolsets: @@ -442,22 +472,47 @@ async def main(): 'Please file an issue if this is not sufficient for your use case.' ) - async with super().run_stream( - user_prompt, - output_type=output_type, - message_history=message_history, - deferred_tool_results=deferred_tool_results, - model=model, - deps=deps, - model_settings=model_settings, - usage_limits=usage_limits, - usage=usage, - infer_name=infer_name, - toolsets=toolsets, - event_stream_handler=event_stream_handler, - **_deprecated_kwargs, - ) as result: - yield result + # Execute the streaming via Hatchet workflow + agent_run_id = uuid4() + + result = await self.hatchet_wrapped_run_stream_workflow.aio_run( + RunAgentInput[RunOutputDataT, AgentDepsT]( + user_prompt=user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + deprecated_kwargs=_deprecated_kwargs, + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(agent_run_id), + 'hatchet__stream_mode': True, + } + ), + ) + + if isinstance(result, dict): + result = TypeAdapter(AgentRunResult[Any]).validate_python(result) + + messages = result.all_messages() + new_message_index = result._new_message_index + + streamed_result = StreamedRunResult( + all_messages=messages, + new_message_index=new_message_index, + run_result=result, + ) + + yield streamed_result @overload def iter( diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 170a2458d1..87cf6df83b 100644 --- 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -6,6 +6,7 @@ from hatchet_sdk import Context, Hatchet from pydantic import BaseModel, ConfigDict +from pydantic_ai.agent import EventStreamHandler from pydantic_ai.messages import ( ModelMessage, ModelResponse, @@ -13,8 +14,9 @@ from pydantic_ai.models import Model, ModelRequestParameters from pydantic_ai.models.wrapper import WrapperModel from pydantic_ai.settings import ModelSettings -from pydantic_ai.tools import RunContext +from pydantic_ai.tools import AgentDepsT, RunContext +from ._run_context import HatchetRunContext from ._utils import TaskConfig @@ -26,6 +28,16 @@ class ModelInput(BaseModel): model_request_parameters: ModelRequestParameters +class ModelStreamInput(BaseModel): + model_config = ConfigDict(arbitrary_types_allowed=True) + + messages: list[ModelMessage] + model_settings: ModelSettings | None + model_request_parameters: ModelRequestParameters + serialized_run_context: Any + deps_type_name: str + + class HatchetModel(WrapperModel): """A wrapper for Model that integrates with Hatchet, turning request and request_stream to Hatchet tasks.""" @@ -36,13 +48,17 @@ def __init__( task_name_prefix: str, task_config: TaskConfig, hatchet: Hatchet, - event_stream_handler: Any = None, + event_stream_handler: EventStreamHandler[AgentDepsT] | None = None, + deps_type: type[AgentDepsT] | None = None, + run_context_type: type[HatchetRunContext[AgentDepsT]] = HatchetRunContext[AgentDepsT], ): super().__init__(model) self.task_config = task_config self.hatchet = hatchet self._task_name_prefix = task_name_prefix self.event_stream_handler = event_stream_handler + self.deps_type = deps_type + self.run_context_type = run_context_type @hatchet.task( name=f'{self._task_name_prefix}__model__request', @@ -71,6 +87,44 @@ async def wrapped_request_task( self.hatchet_wrapped_request_task = wrapped_request_task + @hatchet.task( + 
name=f'{self._task_name_prefix}__model__request_stream', + description=self.task_config.description, + input_validator=ModelStreamInput, + version=self.task_config.version, + sticky=self.task_config.sticky, + default_priority=self.task_config.default_priority, + concurrency=self.task_config.concurrency, + schedule_timeout=self.task_config.schedule_timeout, + execution_timeout=self.task_config.execution_timeout, + retries=self.task_config.retries, + rate_limits=self.task_config.rate_limits, + desired_worker_labels=self.task_config.desired_worker_labels, + backoff_factor=self.task_config.backoff_factor, + backoff_max_seconds=self.task_config.backoff_max_seconds, + default_filters=self.task_config.default_filters, + ) + async def wrapped_request_stream_task( + input: ModelStreamInput, + _ctx: Context, + ) -> ModelResponse: + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, deps=input.serialized_run_context + ) + + async with super(HatchetModel, self).request_stream( + input.messages, input.model_settings, input.model_request_parameters, run_context + ) as streamed_response: + if self.event_stream_handler: + await self.event_stream_handler(run_context, streamed_response) + + async for _ in streamed_response: + pass + + return streamed_response.get() + + self.hatchet_wrapped_request_stream_task = wrapped_request_stream_task + async def request( self, messages: list[ModelMessage], @@ -107,3 +161,24 @@ async def request_stream( messages, model_settings, model_request_parameters, run_context ) as streamed_response: yield streamed_response + + async def request_stream_via_task( + self, + messages: list[ModelMessage], + model_settings: ModelSettings | None, + model_request_parameters: ModelRequestParameters, + run_context: RunContext[Any] | None = None, + ) -> ModelResponse: + """Execute a streaming request via Hatchet task and return the final response.""" + if run_context is None: + raise ValueError('run_context is required for 
streaming via Hatchet task') + + return await self.hatchet_wrapped_request_stream_task.aio_run( + ModelStreamInput( + messages=messages, + model_settings=model_settings, + model_request_parameters=model_request_parameters, + serialized_run_context=self.run_context_type.serialize_run_context(run_context), + deps_type_name=self.deps_type.__name__ if self.deps_type else '', + ) + ) From 84eac57f85395695fba0c9587c926d0e2ca506e2 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 13:09:31 -0400 Subject: [PATCH 32/42] fix: streaming --- .../durable_exec/hatchet/_agent.py | 31 +++++++++---------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 249333c5dc..c82486a1d9 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -145,22 +145,21 @@ async def wrapped_run_stream_workflow( input: RunAgentInput[RunOutputDataT, AgentDepsT], _ctx: DurableContext, ) -> AgentRunResult[Any]: - with self._hatchet_overrides(): - return await super(WrapperAgent, self).run( - input.user_prompt, - output_type=input.output_type, - message_history=input.message_history, - deferred_tool_results=input.deferred_tool_results, - model=input.model, - deps=input.deps, - model_settings=input.model_settings, - usage_limits=input.usage_limits, - usage=input.usage, - infer_name=input.infer_name, - toolsets=input.toolsets, - event_stream_handler=input.event_stream_handler, - **input.deprecated_kwargs, - ) + return await wrapped.run( + input.user_prompt, + output_type=input.output_type, + message_history=input.message_history, + deferred_tool_results=input.deferred_tool_results, + model=self._model, + deps=input.deps, + model_settings=input.model_settings, + usage_limits=input.usage_limits, + usage=input.usage, + infer_name=input.infer_name, + toolsets=self._toolsets, + 
event_stream_handler=input.event_stream_handler, + **input.deprecated_kwargs, + ) self.hatchet_wrapped_run_stream_workflow = wrapped_run_stream_workflow From af7e924d1d398b368479fc648d4beed03358b3c8 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 25 Sep 2025 14:30:53 -0400 Subject: [PATCH 33/42] hack: partially working streaming implementation --- .../durable_exec/hatchet/_agent.py | 10 ++- .../durable_exec/hatchet/_function_toolset.py | 2 +- .../durable_exec/hatchet/_mcp_server.py | 8 +- .../durable_exec/hatchet/_model.py | 78 +++++++++++++------ .../durable_exec/hatchet/_run_context.py | 14 +++- uv.lock | 2 +- 6 files changed, 81 insertions(+), 33 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index c82486a1d9..abd16e21be 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -14,6 +14,7 @@ from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent from pydantic_ai.exceptions import UserError from pydantic_ai.models import Model +from pydantic_ai.messages import ModelMessage from pydantic_ai.output import OutputDataT, OutputSpec from pydantic_ai.result import StreamedRunResult from pydantic_ai.settings import ModelSettings @@ -474,7 +475,7 @@ async def main(): # Execute the streaming via Hatchet workflow agent_run_id = uuid4() - result = await self.hatchet_wrapped_run_stream_workflow.aio_run( + ref = await self.hatchet_wrapped_run_stream_workflow.aio_run_no_wait( RunAgentInput[RunOutputDataT, AgentDepsT]( user_prompt=user_prompt, output_type=output_type, @@ -499,6 +500,13 @@ async def main(): ), ) + all_messages: list[ModelMessage] = [] + + async for x in self._hatchet.runs.subscribe_to_stream(ref.workflow_run_id): + print('\nx', x) + + result = await ref.aio_result() + if isinstance(result, dict): result = 
TypeAdapter(AgentRunResult[Any]).validate_python(result) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py index ede2ffafbd..c1c8de7b29 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -66,7 +66,7 @@ async def tool_task( _ctx: Context, ) -> ToolOutput: run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.deps + input.serialized_run_context, deps=input.deps, hatchet_context=_ctx ) tool = (await wrapped.get_tools(run_context))[current_tool_name] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 9306e8a6b4..0ad942a4e5 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -87,7 +87,9 @@ async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], _ctx: Context, ) -> dict[str, ToolDefinition]: - run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, deps=input.deps, hatchet_context=_ctx + ) # ToolsetTool is not serializable as it holds a SchemaValidator (which is also the same for every MCP tool so unnecessary to pass along the wire every time), # so we just return the ToolDefinitions and wrap them in ToolsetTool outside of the activity. 
@@ -118,7 +120,9 @@ async def wrapped_call_tool_task( input: CallToolInput[AgentDepsT], _ctx: Context, ) -> CallToolOutput[AgentDepsT]: - run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, deps=input.deps, hatchet_context=_ctx + ) tool = self.tool_for_tool_def(input.tool_def) result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, tool) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 87cf6df83b..1006bacbee 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,20 +1,25 @@ from __future__ import annotations +from collections.abc import AsyncIterator from contextlib import asynccontextmanager +from datetime import datetime from typing import Any from hatchet_sdk import Context, Hatchet from pydantic import BaseModel, ConfigDict from pydantic_ai.agent import EventStreamHandler +from pydantic_ai.exceptions import UserError from pydantic_ai.messages import ( ModelMessage, ModelResponse, + ModelResponseStreamEvent, ) -from pydantic_ai.models import Model, ModelRequestParameters +from pydantic_ai.models import Model, ModelRequestParameters, StreamedResponse from pydantic_ai.models.wrapper import WrapperModel from pydantic_ai.settings import ModelSettings from pydantic_ai.tools import AgentDepsT, RunContext +from pydantic_ai.usage import RequestUsage from ._run_context import HatchetRunContext from ._utils import TaskConfig @@ -108,15 +113,16 @@ async def wrapped_request_stream_task( input: ModelStreamInput, _ctx: Context, ) -> ModelResponse: + assert self.event_stream_handler + run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.serialized_run_context + 
input.serialized_run_context, deps=input.serialized_run_context, hatchet_context=_ctx ) - async with super(HatchetModel, self).request_stream( + async with self.wrapped.request_stream( input.messages, input.model_settings, input.model_request_parameters, run_context ) as streamed_response: - if self.event_stream_handler: - await self.event_stream_handler(run_context, streamed_response) + await self.event_stream_handler(run_context, streamed_response) async for _ in streamed_response: pass @@ -148,33 +154,21 @@ async def request_stream( run_context: RunContext[Any] | None = None, ): if self.hatchet.is_in_task_run: - async with super().request_stream( - messages, model_settings, model_request_parameters, run_context - ) as streamed_response: - if self.event_stream_handler is not None and run_context is not None: - await self.event_stream_handler(run_context, streamed_response) - async for _ in streamed_response: - pass - yield streamed_response - else: async with super().request_stream( messages, model_settings, model_request_parameters, run_context ) as streamed_response: yield streamed_response + return - async def request_stream_via_task( - self, - messages: list[ModelMessage], - model_settings: ModelSettings | None, - model_request_parameters: ModelRequestParameters, - run_context: RunContext[Any] | None = None, - ) -> ModelResponse: - """Execute a streaming request via Hatchet task and return the final response.""" if run_context is None: - raise ValueError('run_context is required for streaming via Hatchet task') + raise UserError( + 'A Hatchet model cannot be used with `pydantic_ai.direct.model_request_stream()` as it requires a `run_context`. Set an `event_stream_handler` on the agent and use `agent.run()` instead.' 
+ ) - return await self.hatchet_wrapped_request_stream_task.aio_run( - ModelStreamInput( + assert self.event_stream_handler is not None + + res = await self.hatchet_wrapped_request_stream_task.aio_run( + input=ModelStreamInput( messages=messages, model_settings=model_settings, model_request_parameters=model_request_parameters, @@ -182,3 +176,37 @@ async def request_stream_via_task( deps_type_name=self.deps_type.__name__ if self.deps_type else '', ) ) + + yield HatchetStreamedResponse( + model_request_parameters=model_request_parameters, + response=res, + ) + + +class HatchetStreamedResponse(StreamedResponse): + def __init__(self, model_request_parameters: ModelRequestParameters, response: ModelResponse): + super().__init__(model_request_parameters) + self.response = response + + async def _get_event_iterator(self) -> AsyncIterator[ModelResponseStreamEvent]: + return + # noinspection PyUnreachableCode + yield + + def get(self) -> ModelResponse: + return self.response + + def usage(self) -> RequestUsage: + return self.response.usage # pragma: no cover + + @property + def model_name(self) -> str: + return self.response.model_name or '' # pragma: no cover + + @property + def provider_name(self) -> str: + return self.response.provider_name or '' # pragma: no cover + + @property + def timestamp(self) -> datetime: + return self.response.timestamp # pragma: no cover diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py index a5bf7d6e97..bbb0bd4bf1 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py @@ -2,6 +2,7 @@ from typing import Any +from hatchet_sdk import Context from pydantic import BaseModel, Field from pydantic_ai.exceptions import UserError @@ -24,7 +25,10 @@ class HatchetRunContext(RunContext[AgentDepsT]): To make another attribute available, create a `HatchetRunContext` 
subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to [`HatchetAgent`][pydantic_ai.durable_exec.hatchet.HatchetAgent]. """ - def __init__(self, deps: AgentDepsT, **kwargs: Any): + def __init__(self, deps: AgentDepsT, hatchet_context: Context | None, **kwargs: Any): + print('HatchetRunContext init', kwargs, hatchet_context) + self.hatchet_contetxt = hatchet_context + self.__dict__ = {**kwargs, 'deps': deps} setattr( self, @@ -58,7 +62,11 @@ def serialize_run_context(cls, ctx: RunContext[Any]) -> SerializedHatchetRunCont @classmethod def deserialize_run_context( - cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT + cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT, hatchet_context: Context ) -> HatchetRunContext[AgentDepsT]: """Deserialize the run context from a `SerializedHatchetRunContext`.""" - return cls(**ctx.model_dump(), deps=deps) + return cls( + deps=deps, + hatchet_context=hatchet_context, + **ctx.model_dump(), + ) diff --git a/uv.lock b/uv.lock index 37b9df4ede..b61c6a8cf8 100644 --- a/uv.lock +++ b/uv.lock @@ -1686,7 +1686,7 @@ wheels = [ [[package]] name = "hatchet-sdk" version = "1.19.1" -source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#6975357268b740219309633352d85f377782179e" } +source = { git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#ed3a64aa36f0e4c15da148023894422ed5b1eccd" } dependencies = [ { name = "aiohttp" }, { name = "grpcio" }, From 7bfac3306a95a3b750bee2f5df3d9d7e2a2726f9 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Fri, 26 Sep 2025 14:26:02 -0400 Subject: [PATCH 34/42] feat: incremental progress on streaming --- .../durable_exec/hatchet/_agent.py | 48 ++++++++----------- .../durable_exec/hatchet/_model.py | 13 +++-- 2 files changed, 29 insertions(+), 32 deletions(-) diff --git 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index abd16e21be..29ac238383 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -1,6 +1,6 @@ from __future__ import annotations -from collections.abc import AsyncIterator, Iterator, Sequence +from collections.abc import AsyncIterable, AsyncIterator, Iterator, Sequence from contextlib import AbstractAsyncContextManager, asynccontextmanager, contextmanager from typing import Any, Generic, overload from uuid import uuid4 @@ -13,14 +13,15 @@ from pydantic_ai import _utils, messages as _messages, models, usage as _usage from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent from pydantic_ai.exceptions import UserError +from pydantic_ai.messages import AgentStreamEvent, ModelMessage from pydantic_ai.models import Model -from pydantic_ai.messages import ModelMessage from pydantic_ai.output import OutputDataT, OutputSpec from pydantic_ai.result import StreamedRunResult from pydantic_ai.settings import ModelSettings from pydantic_ai.tools import ( AgentDepsT, DeferredToolResults, + RunContext, Tool, ToolFuncEither, ) @@ -45,7 +46,6 @@ class RunAgentInput(BaseModel, Generic[RunOutputDataT, AgentDepsT]): usage: _usage.RunUsage | None = None infer_name: bool = True toolsets: Sequence[AbstractToolset[AgentDepsT]] | None = None - event_stream_handler: EventStreamHandler[AgentDepsT] | None = None deprecated_kwargs: dict[str, Any] = Field(default_factory=dict) @@ -97,7 +97,6 @@ def __init__( task_name_prefix=self._name, task_config=model_task_config or TaskConfig(), hatchet=self._hatchet, - event_stream_handler=self.event_stream_handler, deps_type=self.deps_type, run_context_type=self.run_context_type, ) @@ -135,7 +134,6 @@ async def wrapped_run_workflow( usage=input.usage, infer_name=input.infer_name, 
toolsets=input.toolsets, - event_stream_handler=input.event_stream_handler, **input.deprecated_kwargs, ) @@ -144,8 +142,15 @@ async def wrapped_run_workflow( @hatchet.durable_task(name=f'{self._name}.run_stream', input_validator=RunAgentInput[Any, Any]) async def wrapped_run_stream_workflow( input: RunAgentInput[RunOutputDataT, AgentDepsT], - _ctx: DurableContext, + hctx: DurableContext, ) -> AgentRunResult[Any]: + async def event_stream_handler( + ctx: RunContext[AgentDepsT], events: AsyncIterable[AgentStreamEvent] + ) -> None: + async for event in events: + b: bytes = TypeAdapter(AgentStreamEvent).dump_json(event) + await hctx.aio_put_stream(b) + return await wrapped.run( input.user_prompt, output_type=input.output_type, @@ -158,7 +163,7 @@ async def wrapped_run_stream_workflow( usage=input.usage, infer_name=input.infer_name, toolsets=self._toolsets, - event_stream_handler=input.event_stream_handler, + event_stream_handler=event_stream_handler, **input.deprecated_kwargs, ) @@ -178,10 +183,6 @@ def name(self, value: str | None) -> None: # pragma: no cover def model(self) -> Model: return self._model - @property - def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: - return self._event_stream_handler or super().event_stream_handler - @property def toolsets(self) -> Sequence[AbstractToolset[AgentDepsT]]: with self._hatchet_overrides(): @@ -488,7 +489,6 @@ async def main(): usage=usage, infer_name=infer_name, toolsets=toolsets, - event_stream_handler=event_stream_handler, deprecated_kwargs=_deprecated_kwargs, ), options=TriggerWorkflowOptions( @@ -501,25 +501,19 @@ async def main(): ) all_messages: list[ModelMessage] = [] + index = 0 async for x in self._hatchet.runs.subscribe_to_stream(ref.workflow_run_id): - print('\nx', x) + adapter = TypeAdapter[AgentStreamEvent](AgentStreamEvent) + parsed = adapter.validate_json(x) - result = await ref.aio_result() - - if isinstance(result, dict): - result = 
TypeAdapter(AgentRunResult[Any]).validate_python(result) - - messages = result.all_messages() - new_message_index = result._new_message_index - - streamed_result = StreamedRunResult( - all_messages=messages, - new_message_index=new_message_index, - run_result=result, - ) + yield StreamedRunResult( + all_messages=messages, + new_message_index=index, + run_result=result, + ) - yield streamed_result + index += 1 @overload def iter( diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 1006bacbee..7d797f0fc8 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -1,7 +1,9 @@ from __future__ import annotations +import json from collections.abc import AsyncIterator from contextlib import asynccontextmanager +from dataclasses import asdict from datetime import datetime from typing import Any @@ -111,21 +113,22 @@ async def wrapped_request_task( ) async def wrapped_request_stream_task( input: ModelStreamInput, - _ctx: Context, + ctx: Context, ) -> ModelResponse: assert self.event_stream_handler run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.serialized_run_context, hatchet_context=_ctx + input.serialized_run_context, deps=input.serialized_run_context, hatchet_context=ctx ) async with self.wrapped.request_stream( input.messages, input.model_settings, input.model_request_parameters, run_context ) as streamed_response: - await self.event_stream_handler(run_context, streamed_response) + async for s in streamed_response: + print('streamed chunk', s) + serialized = json.dumps(asdict(s), default=str) - async for _ in streamed_response: - pass + await ctx.aio_put_stream(serialized) return streamed_response.get() From 4efad747b8dba589e4101d56fa459ccc399031c6 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Fri, 26 Sep 2025 15:51:48 -0400 Subject: [PATCH 
35/42] feat: more incremental streaming progress --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 29ac238383..b98b26342a 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -13,7 +13,7 @@ from pydantic_ai import _utils, messages as _messages, models, usage as _usage from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent from pydantic_ai.exceptions import UserError -from pydantic_ai.messages import AgentStreamEvent, ModelMessage +from pydantic_ai.messages import AgentStreamEvent from pydantic_ai.models import Model from pydantic_ai.output import OutputDataT, OutputSpec from pydantic_ai.result import StreamedRunResult @@ -148,7 +148,7 @@ async def event_stream_handler( ctx: RunContext[AgentDepsT], events: AsyncIterable[AgentStreamEvent] ) -> None: async for event in events: - b: bytes = TypeAdapter(AgentStreamEvent).dump_json(event) + b = TypeAdapter[AgentStreamEvent](AgentStreamEvent).dump_json(event) await hctx.aio_put_stream(b) return await wrapped.run( @@ -279,7 +279,6 @@ async def run( usage=usage, infer_name=infer_name, toolsets=toolsets, - event_stream_handler=event_stream_handler, deprecated_kwargs=_deprecated_kwargs, ), options=TriggerWorkflowOptions( @@ -364,7 +363,6 @@ def run_sync( usage=usage, infer_name=infer_name, toolsets=toolsets, - event_stream_handler=event_stream_handler, deprecated_kwargs=_deprecated_kwargs, ), options=TriggerWorkflowOptions( @@ -500,20 +498,11 @@ async def main(): ), ) - all_messages: list[ModelMessage] = [] - index = 0 - async for x in self._hatchet.runs.subscribe_to_stream(ref.workflow_run_id): adapter = TypeAdapter[AgentStreamEvent](AgentStreamEvent) parsed = 
adapter.validate_json(x) - yield StreamedRunResult( - all_messages=messages, - new_message_index=index, - run_result=result, - ) - - index += 1 + yield parsed @overload def iter( From c77f051c1fa941bf856ea81132673ba1da0b6390 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 12:29:23 -0400 Subject: [PATCH 36/42] fix: use temporal-style stream handler for now --- .../durable_exec/hatchet/_agent.py | 77 +++++++++++-------- .../durable_exec/hatchet/_run_context.py | 9 +-- 2 files changed, 46 insertions(+), 40 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index b98b26342a..b7b025c6a6 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -5,7 +5,7 @@ from typing import Any, Generic, overload from uuid import uuid4 -from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions +from hatchet_sdk import Context, DurableContext, Hatchet, TriggerWorkflowOptions from hatchet_sdk.runnables.workflow import BaseWorkflow from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never @@ -49,6 +49,12 @@ class RunAgentInput(BaseModel, Generic[RunOutputDataT, AgentDepsT]): deprecated_kwargs: dict[str, Any] = Field(default_factory=dict) +class EventStreamHandlerInput(BaseModel, Generic[AgentDepsT]): + event: _messages.AgentStreamEvent + serialized_run_context: Any + deps: AgentDepsT + + class HatchetAgent(WrapperAgent[AgentDepsT, OutputDataT]): def __init__( self, @@ -169,6 +175,27 @@ async def event_stream_handler( self.hatchet_wrapped_run_stream_workflow = wrapped_run_stream_workflow + @hatchet.task( + name=f'{self._name}.event_stream_handler', + input_validator=EventStreamHandlerInput, + ) + async def event_stream_handler_task(input: EventStreamHandlerInput, ctx: Context) -> None: + # We can never get here without an 
`event_stream_handler`, as `HatchetAgent.run_stream` and `HatchetAgent.iter` raise an error saying to use `HatchetAgent.run` instead, + # and that only ends up calling `event_stream_handler` if it is set. + assert self.event_stream_handler is not None + + run_context = self.run_context_type.deserialize_run_context( + input.serialized_run_context, + deps=input.deps, + ) + + async def streamed_response(): + yield input.event + + await self.event_stream_handler(run_context, streamed_response()) + + self.hatchet_wrapped_event_stream_handler = event_stream_handler_task + @property def name(self) -> str | None: return self._name @@ -471,38 +498,22 @@ async def main(): 'Please file an issue if this is not sufficient for your use case.' ) - # Execute the streaming via Hatchet workflow - agent_run_id = uuid4() - - ref = await self.hatchet_wrapped_run_stream_workflow.aio_run_no_wait( - RunAgentInput[RunOutputDataT, AgentDepsT]( - user_prompt=user_prompt, - output_type=output_type, - message_history=message_history, - deferred_tool_results=deferred_tool_results, - model=model, - deps=deps, - model_settings=model_settings, - usage_limits=usage_limits, - usage=usage, - infer_name=infer_name, - toolsets=toolsets, - deprecated_kwargs=_deprecated_kwargs, - ), - options=TriggerWorkflowOptions( - additional_metadata={ - 'hatchet__agent_name': self._name, - 'hatchet__agent_run_id': str(agent_run_id), - 'hatchet__stream_mode': True, - } - ), - ) - - async for x in self._hatchet.runs.subscribe_to_stream(ref.workflow_run_id): - adapter = TypeAdapter[AgentStreamEvent](AgentStreamEvent) - parsed = adapter.validate_json(x) - - yield parsed + async with super().run_stream( + user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + 
**_deprecated_kwargs, + ) as result: + yield result @overload def iter( diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py index bbb0bd4bf1..5bb8517544 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_run_context.py @@ -2,7 +2,6 @@ from typing import Any -from hatchet_sdk import Context from pydantic import BaseModel, Field from pydantic_ai.exceptions import UserError @@ -25,10 +24,7 @@ class HatchetRunContext(RunContext[AgentDepsT]): To make another attribute available, create a `HatchetRunContext` subclass with a custom `serialize_run_context` class method that returns a dictionary that includes the attribute and pass it to [`HatchetAgent`][pydantic_ai.durable_exec.hatchet.HatchetAgent]. """ - def __init__(self, deps: AgentDepsT, hatchet_context: Context | None, **kwargs: Any): - print('HatchetRunContext init', kwargs, hatchet_context) - self.hatchet_contetxt = hatchet_context - + def __init__(self, deps: AgentDepsT, **kwargs: Any): self.__dict__ = {**kwargs, 'deps': deps} setattr( self, @@ -62,11 +58,10 @@ def serialize_run_context(cls, ctx: RunContext[Any]) -> SerializedHatchetRunCont @classmethod def deserialize_run_context( - cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT, hatchet_context: Context + cls, ctx: SerializedHatchetRunContext, deps: AgentDepsT ) -> HatchetRunContext[AgentDepsT]: """Deserialize the run context from a `SerializedHatchetRunContext`.""" return cls( deps=deps, - hatchet_context=hatchet_context, **ctx.model_dump(), ) From 5a99248468da22ff6fd4cb4fbab3ea57e8b52c58 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 12:31:40 -0400 Subject: [PATCH 37/42] fix: generic --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index b7b025c6a6..8f12967ba4 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -177,9 +177,9 @@ async def event_stream_handler( @hatchet.task( name=f'{self._name}.event_stream_handler', - input_validator=EventStreamHandlerInput, + input_validator=EventStreamHandlerInput[AgentDepsT], ) - async def event_stream_handler_task(input: EventStreamHandlerInput, ctx: Context) -> None: + async def event_stream_handler_task(input: EventStreamHandlerInput[AgentDepsT], ctx: Context) -> None: # We can never get here without an `event_stream_handler`, as `HatchetAgent.run_stream` and `HatchetAgent.iter` raise an error saying to use `HatchetAgent.run` instead, # and that only ends up calling `event_stream_handler` if it is set. assert self.event_stream_handler is not None From 5b6a73241bcc5fd5384c0f4cbf37c4f9da31f0f8 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 12:51:59 -0400 Subject: [PATCH 38/42] feat: temporal-ish event stream handler --- .../durable_exec/hatchet/_agent.py | 37 +++++++++++++++---- 1 file changed, 30 insertions(+), 7 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 8f12967ba4..56f8441cd0 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -196,6 +196,36 @@ async def streamed_response(): self.hatchet_wrapped_event_stream_handler = event_stream_handler_task + @property + def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: + handler = self._event_stream_handler or super().event_stream_handler + print('Getting event_stream_handler', handler, self._hatchet.is_in_task_run) + if handler is None: + return None + elif 
self._hatchet.is_in_task_run: + return self._call_event_stream_handler_task + else: + return handler + + async def _call_event_stream_handler_task( + self, ctx: RunContext[AgentDepsT], stream: AsyncIterable[_messages.AgentStreamEvent] + ) -> None: + serialized_run_context = self.run_context_type.serialize_run_context(ctx) + async for event in stream: + await self.hatchet_wrapped_event_stream_handler.aio_run( + input=EventStreamHandlerInput[AgentDepsT]( + event=event, + serialized_run_context=serialized_run_context, + deps=ctx.deps, + ), + options=TriggerWorkflowOptions( + additional_metadata={ + 'hatchet__agent_name': self._name, + 'hatchet__agent_run_id': str(uuid4()), + } + ), + ) + @property def name(self) -> str | None: return self._name @@ -643,13 +673,6 @@ async def main(): Returns: The result of the run. """ - if self._hatchet.is_in_task_run: - raise UserError( - '`agent.iter()` cannot currently be used inside a Hatchet workflow. ' - 'Set an `event_stream_handler` on the agent and use `agent.run()` instead. ' - 'Please file an issue if this is not sufficient for your use case.' 
- ) - async with super().iter( user_prompt=user_prompt, output_type=output_type, From 4286ddfd370901229858fdd4ecb764838150b849 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 14:13:36 -0400 Subject: [PATCH 39/42] fix: stream handler --- .../durable_exec/hatchet/_agent.py | 83 +++++-------------- 1 file changed, 19 insertions(+), 64 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 56f8441cd0..4bc979a749 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -105,6 +105,7 @@ def __init__( hatchet=self._hatchet, deps_type=self.deps_type, run_context_type=self.run_context_type, + event_stream_handler=self.event_stream_handler, ) hatchet_agent_name = self._name @@ -175,56 +176,9 @@ async def event_stream_handler( self.hatchet_wrapped_run_stream_workflow = wrapped_run_stream_workflow - @hatchet.task( - name=f'{self._name}.event_stream_handler', - input_validator=EventStreamHandlerInput[AgentDepsT], - ) - async def event_stream_handler_task(input: EventStreamHandlerInput[AgentDepsT], ctx: Context) -> None: - # We can never get here without an `event_stream_handler`, as `HatchetAgent.run_stream` and `HatchetAgent.iter` raise an error saying to use `HatchetAgent.run` instead, - # and that only ends up calling `event_stream_handler` if it is set. 
- assert self.event_stream_handler is not None - - run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, - deps=input.deps, - ) - - async def streamed_response(): - yield input.event - - await self.event_stream_handler(run_context, streamed_response()) - - self.hatchet_wrapped_event_stream_handler = event_stream_handler_task - @property def event_stream_handler(self) -> EventStreamHandler[AgentDepsT] | None: - handler = self._event_stream_handler or super().event_stream_handler - print('Getting event_stream_handler', handler, self._hatchet.is_in_task_run) - if handler is None: - return None - elif self._hatchet.is_in_task_run: - return self._call_event_stream_handler_task - else: - return handler - - async def _call_event_stream_handler_task( - self, ctx: RunContext[AgentDepsT], stream: AsyncIterable[_messages.AgentStreamEvent] - ) -> None: - serialized_run_context = self.run_context_type.serialize_run_context(ctx) - async for event in stream: - await self.hatchet_wrapped_event_stream_handler.aio_run( - input=EventStreamHandlerInput[AgentDepsT]( - event=event, - serialized_run_context=serialized_run_context, - deps=ctx.deps, - ), - options=TriggerWorkflowOptions( - additional_metadata={ - 'hatchet__agent_name': self._name, - 'hatchet__agent_run_id': str(uuid4()), - } - ), - ) + return self._event_stream_handler or super().event_stream_handler @property def name(self) -> str | None: @@ -528,22 +482,23 @@ async def main(): 'Please file an issue if this is not sufficient for your use case.' 
) - async with super().run_stream( - user_prompt, - output_type=output_type, - message_history=message_history, - deferred_tool_results=deferred_tool_results, - model=model, - deps=deps, - model_settings=model_settings, - usage_limits=usage_limits, - usage=usage, - infer_name=infer_name, - toolsets=toolsets, - event_stream_handler=event_stream_handler, - **_deprecated_kwargs, - ) as result: - yield result + with self._hatchet_overrides(): + async with super().run_stream( + user_prompt, + output_type=output_type, + message_history=message_history, + deferred_tool_results=deferred_tool_results, + model=model, + deps=deps, + model_settings=model_settings, + usage_limits=usage_limits, + usage=usage, + infer_name=infer_name, + toolsets=toolsets, + event_stream_handler=event_stream_handler, + **_deprecated_kwargs, + ) as result: + yield result @overload def iter( From fd0247059557371da3600a626f07313ce3c5e164 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 14:13:49 -0400 Subject: [PATCH 40/42] chore: lint --- pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 4bc979a749..7ab150b755 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -5,7 +5,7 @@ from typing import Any, Generic, overload from uuid import uuid4 -from hatchet_sdk import Context, DurableContext, Hatchet, TriggerWorkflowOptions +from hatchet_sdk import DurableContext, Hatchet, TriggerWorkflowOptions from hatchet_sdk.runnables.workflow import BaseWorkflow from pydantic import BaseModel, ConfigDict, Field, TypeAdapter from typing_extensions import Never From 5d8f9715ca9f3515e2825e06687aeea5b89d6eb2 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 15:00:52 -0400 Subject: [PATCH 41/42] chore: lock 
--- uv.lock | 185 +++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 171 insertions(+), 14 deletions(-) diff --git a/uv.lock b/uv.lock index 270552e04d..ddd19ab389 100644 --- a/uv.lock +++ b/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 2 requires-python = ">=3.10" resolution-markers = [ "python_full_version >= '3.13' and platform_python_implementation == 'PyPy'", @@ -1400,6 +1400,120 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4d/11/1019a6cfdb2e520cb461cf70d859216be8ca122ddf5ad301fc3b0ee45fd4/groq-0.25.0-py3-none-any.whl", hash = "sha256:aadc78b40b1809cdb196b1aa8c7f7293108767df1508cafa3e0d5045d9328e7a", size = 129371, upload-time = "2025-05-16T19:57:41.786Z" }, ] +[[package]] +name = "grpcio" +version = "1.75.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/57/89fd829fb00a6d0bee3fbcb2c8a7aa0252d908949b6ab58bfae99d39d77e/grpcio-1.75.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:1712b5890b22547dd29f3215c5788d8fc759ce6dd0b85a6ba6e2731f2d04c088", size = 5705534, upload-time = "2025-09-26T09:00:52.225Z" }, + { url = "https://files.pythonhosted.org/packages/76/dd/2f8536e092551cf804e96bcda79ecfbc51560b214a0f5b7ebc253f0d4664/grpcio-1.75.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8d04e101bba4b55cea9954e4aa71c24153ba6182481b487ff376da28d4ba46cf", size = 11484103, upload-time = "2025-09-26T09:00:59.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/3d/affe2fb897804c98d56361138e73786af8f4dd876b9d9851cfe6342b53c8/grpcio-1.75.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:683cfc70be0c1383449097cba637317e4737a357cfc185d887fd984206380403", size = 6289953, upload-time = "2025-09-26T09:01:03.699Z" }, + { url = "https://files.pythonhosted.org/packages/87/aa/0f40b7f47a0ff10d7e482bc3af22dac767c7ff27205915f08962d5ca87a2/grpcio-1.75.1-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:491444c081a54dcd5e6ada57314321ae526377f498d4aa09d975c3241c5b9e1c", size = 6949785, upload-time = "2025-09-26T09:01:07.504Z" }, + { url = "https://files.pythonhosted.org/packages/a5/45/b04407e44050781821c84f26df71b3f7bc469923f92f9f8bc27f1406dbcc/grpcio-1.75.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ce08d4e112d0d38487c2b631ec8723deac9bc404e9c7b1011426af50a79999e4", size = 6465708, upload-time = "2025-09-26T09:01:11.028Z" }, + { url = "https://files.pythonhosted.org/packages/09/3e/4ae3ec0a4d20dcaafbb6e597defcde06399ccdc5b342f607323f3b47f0a3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5a2acda37fc926ccc4547977ac3e56b1df48fe200de968e8c8421f6e3093df6c", size = 7100912, upload-time = "2025-09-26T09:01:14.393Z" }, + { url = "https://files.pythonhosted.org/packages/34/3f/a9085dab5c313bb0cb853f222d095e2477b9b8490a03634cdd8d19daa5c3/grpcio-1.75.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:745c5fe6bf05df6a04bf2d11552c7d867a2690759e7ab6b05c318a772739bd75", size = 8042497, upload-time = "2025-09-26T09:01:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/c3/87/ea54eba931ab9ed3f999ba95f5d8d01a20221b664725bab2fe93e3dee848/grpcio-1.75.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:259526a7159d39e2db40d566fe3e8f8e034d0fb2db5bf9c00e09aace655a4c2b", size = 7493284, upload-time = "2025-09-26T09:01:20.896Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/5e/287f1bf1a998f4ac46ef45d518de3b5da08b4e86c7cb5e1108cee30b0282/grpcio-1.75.1-cp310-cp310-win32.whl", hash = "sha256:f4b29b9aabe33fed5df0a85e5f13b09ff25e2c05bd5946d25270a8bd5682dac9", size = 3950809, upload-time = "2025-09-26T09:01:23.695Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a2/3cbfc06a4ec160dc77403b29ecb5cf76ae329eb63204fea6a7c715f1dfdb/grpcio-1.75.1-cp310-cp310-win_amd64.whl", hash = "sha256:cf2e760978dcce7ff7d465cbc7e276c3157eedc4c27aa6de7b594c7a295d3d61", size = 4644704, upload-time = "2025-09-26T09:01:25.763Z" }, + { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, + { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = "2025-09-26T09:01:39.474Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, + { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, + { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, + { url = "https://files.pythonhosted.org/packages/46/74/bac4ab9f7722164afdf263ae31ba97b8174c667153510322a5eba4194c32/grpcio-1.75.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:3bed22e750d91d53d9e31e0af35a7b0b51367e974e14a4ff229db5b207647884", size = 5672779, upload-time = "2025-09-26T09:02:19.11Z" }, + { url = "https://files.pythonhosted.org/packages/a6/52/d0483cfa667cddaa294e3ab88fd2c2a6e9dc1a1928c0e5911e2e54bd5b50/grpcio-1.75.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5b8f381eadcd6ecaa143a21e9e80a26424c76a0a9b3d546febe6648f3a36a5ac", size = 11470623, upload-time = "2025-09-26T09:02:22.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/e4/d1954dce2972e32384db6a30273275e8c8ea5a44b80347f9055589333b3f/grpcio-1.75.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5bf4001d3293e3414d0cf99ff9b1139106e57c3a66dfff0c5f60b2a6286ec133", size = 6248838, upload-time = "2025-09-26T09:02:26.426Z" }, + { url = "https://files.pythonhosted.org/packages/06/43/073363bf63826ba8077c335d797a8d026f129dc0912b69c42feaf8f0cd26/grpcio-1.75.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f82ff474103e26351dacfe8d50214e7c9322960d8d07ba7fa1d05ff981c8b2d", size = 6922663, upload-time = "2025-09-26T09:02:28.724Z" }, + { url = "https://files.pythonhosted.org/packages/c2/6f/076ac0df6c359117676cacfa8a377e2abcecec6a6599a15a672d331f6680/grpcio-1.75.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ee119f4f88d9f75414217823d21d75bfe0e6ed40135b0cbbfc6376bc9f7757d", size = 6436149, upload-time = "2025-09-26T09:02:30.971Z" }, + { url = "https://files.pythonhosted.org/packages/6b/27/1d08824f1d573fcb1fa35ede40d6020e68a04391709939e1c6f4193b445f/grpcio-1.75.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:664eecc3abe6d916fa6cf8dd6b778e62fb264a70f3430a3180995bf2da935446", size = 7067989, upload-time = "2025-09-26T09:02:33.233Z" }, + { url = "https://files.pythonhosted.org/packages/c6/98/98594cf97b8713feb06a8cb04eeef60b4757e3e2fb91aa0d9161da769843/grpcio-1.75.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c32193fa08b2fbebf08fe08e84f8a0aad32d87c3ad42999c65e9449871b1c66e", size = 8010717, upload-time = "2025-09-26T09:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7e/bb80b1bba03c12158f9254762cdf5cced4a9bc2e8ed51ed335915a5a06ef/grpcio-1.75.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5cebe13088b9254f6e615bcf1da9131d46cfa4e88039454aca9cb65f639bd3bc", size = 7463822, upload-time = "2025-09-26T09:02:38.26Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/1c/1ea57fdc06927eb5640f6750c697f596f26183573069189eeaf6ef86ba2d/grpcio-1.75.1-cp313-cp313-win32.whl", hash = "sha256:4b4c678e7ed50f8ae8b8dbad15a865ee73ce12668b6aaf411bf3258b5bc3f970", size = 3938490, upload-time = "2025-09-26T09:02:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/4b/24/fbb8ff1ccadfbf78ad2401c41aceaf02b0d782c084530d8871ddd69a2d49/grpcio-1.75.1-cp313-cp313-win_amd64.whl", hash = "sha256:5573f51e3f296a1bcf71e7a690c092845fb223072120f4bdb7a5b48e111def66", size = 4642538, upload-time = "2025-09-26T09:02:42.519Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1b/9a0a5cecd24302b9fdbcd55d15ed6267e5f3d5b898ff9ac8cbe17ee76129/grpcio-1.75.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:c05da79068dd96723793bffc8d0e64c45f316248417515f28d22204d9dae51c7", size = 5673319, upload-time = "2025-09-26T09:02:44.742Z" }, + { url = "https://files.pythonhosted.org/packages/c6/ec/9d6959429a83fbf5df8549c591a8a52bb313976f6646b79852c4884e3225/grpcio-1.75.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06373a94fd16ec287116a825161dca179a0402d0c60674ceeec8c9fba344fe66", size = 11480347, upload-time = "2025-09-26T09:02:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/09/7a/26da709e42c4565c3d7bf999a9569da96243ce34a8271a968dee810a7cf1/grpcio-1.75.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4484f4b7287bdaa7a5b3980f3c7224c3c622669405d20f69549f5fb956ad0421", size = 6254706, upload-time = "2025-09-26T09:02:50.4Z" }, + { url = "https://files.pythonhosted.org/packages/f1/08/dcb26a319d3725f199c97e671d904d84ee5680de57d74c566a991cfab632/grpcio-1.75.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2720c239c1180eee69f7883c1d4c83fc1a495a2535b5fa322887c70bf02b16e8", size = 6922501, upload-time = "2025-09-26T09:02:52.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/66/044d412c98408a5e23cb348845979a2d17a2e2b6c3c34c1ec91b920f49d0/grpcio-1.75.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:07a554fa31c668cf0e7a188678ceeca3cb8fead29bbe455352e712ec33ca701c", size = 6437492, upload-time = "2025-09-26T09:02:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/4e/9d/5e3e362815152aa1afd8b26ea613effa005962f9da0eec6e0e4527e7a7d1/grpcio-1.75.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3e71a2105210366bfc398eef7f57a664df99194f3520edb88b9c3a7e46ee0d64", size = 7081061, upload-time = "2025-09-26T09:02:58.261Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1a/46615682a19e100f46e31ddba9ebc297c5a5ab9ddb47b35443ffadb8776c/grpcio-1.75.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8679aa8a5b67976776d3c6b0521e99d1c34db8a312a12bcfd78a7085cb9b604e", size = 8010849, upload-time = "2025-09-26T09:03:00.548Z" }, + { url = "https://files.pythonhosted.org/packages/67/8e/3204b94ac30b0f675ab1c06540ab5578660dc8b690db71854d3116f20d00/grpcio-1.75.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:aad1c774f4ebf0696a7f148a56d39a3432550612597331792528895258966dc0", size = 7464478, upload-time = "2025-09-26T09:03:03.096Z" }, + { url = "https://files.pythonhosted.org/packages/b7/97/2d90652b213863b2cf466d9c1260ca7e7b67a16780431b3eb1d0420e3d5b/grpcio-1.75.1-cp314-cp314-win32.whl", hash = "sha256:62ce42d9994446b307649cb2a23335fa8e927f7ab2cbf5fcb844d6acb4d85f9c", size = 4012672, upload-time = "2025-09-26T09:03:05.477Z" }, + { url = "https://files.pythonhosted.org/packages/f9/df/e2e6e9fc1c985cd1a59e6996a05647c720fe8a03b92f5ec2d60d366c531e/grpcio-1.75.1-cp314-cp314-win_amd64.whl", hash = "sha256:f86e92275710bea3000cb79feca1762dc0ad3b27830dd1a74e82ab321d4ee464", size = 4772475, upload-time = "2025-09-26T09:03:07.661Z" }, +] + +[[package]] +name = "grpcio-tools" +version = "1.71.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { 
name = "grpcio" }, + { name = "protobuf" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/ad/e74a4d1cffff628c2ef1ec5b9944fb098207cc4af6eb8db4bc52e6d99236/grpcio_tools-1.71.2-cp310-cp310-linux_armv7l.whl", hash = "sha256:ab8a28c2e795520d6dc6ffd7efaef4565026dbf9b4f5270de2f3dd1ce61d2318", size = 2385557, upload-time = "2025-06-28T04:20:38.833Z" }, + { url = "https://files.pythonhosted.org/packages/63/bf/30b63418279d6fdc4fd4a3781a7976c40c7e8ee052333b9ce6bd4ce63f30/grpcio_tools-1.71.2-cp310-cp310-macosx_10_14_universal2.whl", hash = "sha256:654ecb284a592d39a85556098b8c5125163435472a20ead79b805cf91814b99e", size = 5446915, upload-time = "2025-06-28T04:20:40.947Z" }, + { url = "https://files.pythonhosted.org/packages/83/cd/2994e0a0a67714fdb00c207c4bec60b9b356fbd6b0b7a162ecaabe925155/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:b49aded2b6c890ff690d960e4399a336c652315c6342232c27bd601b3705739e", size = 2348301, upload-time = "2025-06-28T04:20:42.766Z" }, + { url = "https://files.pythonhosted.org/packages/5b/8b/4f2315927af306af1b35793b332b9ca9dc5b5a2cde2d55811c9577b5f03f/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7811a6fc1c4b4e5438e5eb98dbd52c2dc4a69d1009001c13356e6636322d41a", size = 2742159, upload-time = "2025-06-28T04:20:44.206Z" }, + { url = "https://files.pythonhosted.org/packages/8d/98/d513f6c09df405c82583e7083c20718ea615ed0da69ec42c80ceae7ebdc5/grpcio_tools-1.71.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393a9c80596aa2b3f05af854e23336ea8c295593bbb35d9adae3d8d7943672bd", size = 2473444, upload-time = 
"2025-06-28T04:20:45.5Z" }, + { url = "https://files.pythonhosted.org/packages/fa/fe/00af17cc841916d5e4227f11036bf443ce006629212c876937c7904b0ba3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:823e1f23c12da00f318404c4a834bb77cd150d14387dee9789ec21b335249e46", size = 2850339, upload-time = "2025-06-28T04:20:46.758Z" }, + { url = "https://files.pythonhosted.org/packages/7d/59/745fc50dfdbed875fcfd6433883270d39d23fb1aa4ecc9587786f772dce3/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9bfbea79d6aec60f2587133ba766ede3dc3e229641d1a1e61d790d742a3d19eb", size = 3300795, upload-time = "2025-06-28T04:20:48.327Z" }, + { url = "https://files.pythonhosted.org/packages/62/3e/d9d0fb2df78e601c28d02ef0cd5d007f113c1b04fc21e72bf56e8c3df66b/grpcio_tools-1.71.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:32f3a67b10728835b5ffb63fbdbe696d00e19a27561b9cf5153e72dbb93021ba", size = 2913729, upload-time = "2025-06-28T04:20:49.641Z" }, + { url = "https://files.pythonhosted.org/packages/09/ae/ddb264b4a10c6c10336a7c177f8738b230c2c473d0c91dd5d8ce8ea1b857/grpcio_tools-1.71.2-cp310-cp310-win32.whl", hash = "sha256:7fcf9d92c710bfc93a1c0115f25e7d49a65032ff662b38b2f704668ce0a938df", size = 945997, upload-time = "2025-06-28T04:20:50.9Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8d/5efd93698fe359f63719d934ebb2d9337e82d396e13d6bf00f4b06793e37/grpcio_tools-1.71.2-cp310-cp310-win_amd64.whl", hash = "sha256:914b4275be810290266e62349f2d020bb7cc6ecf9edb81da3c5cddb61a95721b", size = 1117474, upload-time = "2025-06-28T04:20:52.54Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/0568d38b8da6237ea8ea15abb960fb7ab83eb7bb51e0ea5926dab3d865b1/grpcio_tools-1.71.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:0acb8151ea866be5b35233877fbee6445c36644c0aa77e230c9d1b46bf34b18b", size = 2385557, upload-time = "2025-06-28T04:20:54.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/fb/700d46f72b0f636cf0e625f3c18a4f74543ff127471377e49a071f64f1e7/grpcio_tools-1.71.2-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:b28f8606f4123edb4e6da281547465d6e449e89f0c943c376d1732dc65e6d8b3", size = 5447590, upload-time = "2025-06-28T04:20:55.836Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/d9bb2aec3de305162b23c5c884b9f79b1a195d42b1e6dabcc084cc9d0804/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:cbae6f849ad2d1f5e26cd55448b9828e678cb947fa32c8729d01998238266a6a", size = 2348495, upload-time = "2025-06-28T04:20:57.33Z" }, + { url = "https://files.pythonhosted.org/packages/d5/83/f840aba1690461b65330efbca96170893ee02fae66651bcc75f28b33a46c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4d1027615cfb1e9b1f31f2f384251c847d68c2f3e025697e5f5c72e26ed1316", size = 2742333, upload-time = "2025-06-28T04:20:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/30/34/c02cd9b37de26045190ba665ee6ab8597d47f033d098968f812d253bbf8c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bac95662dc69338edb9eb727cc3dd92342131b84b12b3e8ec6abe973d4cbf1b", size = 2473490, upload-time = "2025-06-28T04:21:00.614Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c7/375718ae091c8f5776828ce97bdcb014ca26244296f8b7f70af1a803ed2f/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c50250c7248055040f89eb29ecad39d3a260a4b6d3696af1575945f7a8d5dcdc", size = 2850333, upload-time = "2025-06-28T04:21:01.95Z" }, + { url = "https://files.pythonhosted.org/packages/19/37/efc69345bd92a73b2bc80f4f9e53d42dfdc234b2491ae58c87da20ca0ea5/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6ab1ad955e69027ef12ace4d700c5fc36341bdc2f420e87881e9d6d02af3d7b8", size = 3300748, upload-time = "2025-06-28T04:21:03.451Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/1f/15f787eb25ae42086f55ed3e4260e85f385921c788debf0f7583b34446e3/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd75dde575781262b6b96cc6d0b2ac6002b2f50882bf5e06713f1bf364ee6e09", size = 2913178, upload-time = "2025-06-28T04:21:04.879Z" }, + { url = "https://files.pythonhosted.org/packages/12/aa/69cb3a9dff7d143a05e4021c3c9b5cde07aacb8eb1c892b7c5b9fb4973e3/grpcio_tools-1.71.2-cp311-cp311-win32.whl", hash = "sha256:9a3cb244d2bfe0d187f858c5408d17cb0e76ca60ec9a274c8fd94cc81457c7fc", size = 946256, upload-time = "2025-06-28T04:21:06.518Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/fb951c5c87eadb507a832243942e56e67d50d7667b0e5324616ffd51b845/grpcio_tools-1.71.2-cp311-cp311-win_amd64.whl", hash = "sha256:00eb909997fd359a39b789342b476cbe291f4dd9c01ae9887a474f35972a257e", size = 1117661, upload-time = "2025-06-28T04:21:08.18Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" }, + { url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" }, + { url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/8a/e4c1c4cb8c9ff7f50b7b2bba94abe8d1e98ea05f52a5db476e7f1c1a3c70/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a28eda8137d587eb30081384c256f5e5de7feda34776f89848b846da64e4be35", size = 2743321, upload-time = "2025-06-28T04:21:15.007Z" }, + { url = "https://files.pythonhosted.org/packages/fd/aa/95bc77fda5c2d56fb4a318c1b22bdba8914d5d84602525c99047114de531/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19c083198f5eb15cc69c0a2f2c415540cbc636bfe76cea268e5894f34023b40", size = 2474005, upload-time = "2025-06-28T04:21:16.443Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ff/ca11f930fe1daa799ee0ce1ac9630d58a3a3deed3dd2f465edb9a32f299d/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:784c284acda0d925052be19053d35afbf78300f4d025836d424cf632404f676a", size = 2851559, upload-time = "2025-06-28T04:21:18.139Z" }, + { url = "https://files.pythonhosted.org/packages/64/10/c6fc97914c7e19c9bb061722e55052fa3f575165da9f6510e2038d6e8643/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:381e684d29a5d052194e095546eef067201f5af30fd99b07b5d94766f44bf1ae", size = 3300622, upload-time = "2025-06-28T04:21:20.291Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d6/965f36cfc367c276799b730d5dd1311b90a54a33726e561393b808339b04/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3e4b4801fabd0427fc61d50d09588a01b1cfab0ec5e8a5f5d515fbdd0891fd11", size = 2913863, upload-time = "2025-06-28T04:21:22.196Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f0/c05d5c3d0c1d79ac87df964e9d36f1e3a77b60d948af65bec35d3e5c75a3/grpcio_tools-1.71.2-cp312-cp312-win32.whl", hash = "sha256:84ad86332c44572305138eafa4cc30040c9a5e81826993eae8227863b700b490", size = 945744, upload-time = "2025-06-28T04:21:23.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/e9/c84c1078f0b7af7d8a40f5214a9bdd8d2a567ad6c09975e6e2613a08d29d/grpcio_tools-1.71.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e1108d37eecc73b1c4a27350a6ed921b5dda25091700c1da17cfe30761cd462", size = 1117695, upload-time = "2025-06-28T04:21:25.22Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/bdf9c5055a1ad0a09123402d73ecad3629f75b9cf97828d547173b328891/grpcio_tools-1.71.2-cp313-cp313-linux_armv7l.whl", hash = "sha256:b0f0a8611614949c906e25c225e3360551b488d10a366c96d89856bcef09f729", size = 2384758, upload-time = "2025-06-28T04:21:26.712Z" }, + { url = "https://files.pythonhosted.org/packages/49/d0/6aaee4940a8fb8269c13719f56d69c8d39569bee272924086aef81616d4a/grpcio_tools-1.71.2-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:7931783ea7ac42ac57f94c5047d00a504f72fbd96118bf7df911bb0e0435fc0f", size = 5443127, upload-time = "2025-06-28T04:21:28.383Z" }, + { url = "https://files.pythonhosted.org/packages/d9/11/50a471dcf301b89c0ed5ab92c533baced5bd8f796abfd133bbfadf6b60e5/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:d188dc28e069aa96bb48cb11b1338e47ebdf2e2306afa58a8162cc210172d7a8", size = 2349627, upload-time = "2025-06-28T04:21:30.254Z" }, + { url = "https://files.pythonhosted.org/packages/bb/66/e3dc58362a9c4c2fbe98a7ceb7e252385777ebb2bbc7f42d5ab138d07ace/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f36c4b3cc42ad6ef67430639174aaf4a862d236c03c4552c4521501422bfaa26", size = 2742932, upload-time = "2025-06-28T04:21:32.325Z" }, + { url = "https://files.pythonhosted.org/packages/b7/1e/1e07a07ed8651a2aa9f56095411198385a04a628beba796f36d98a5a03ec/grpcio_tools-1.71.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bd9ed12ce93b310f0cef304176049d0bc3b9f825e9c8c6a23e35867fed6affd", size = 2473627, upload-time = "2025-06-28T04:21:33.752Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/f9/3b7b32e4acb419f3a0b4d381bc114fe6cd48e3b778e81273fc9e4748caad/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7ce27e76dd61011182d39abca38bae55d8a277e9b7fe30f6d5466255baccb579", size = 2850879, upload-time = "2025-06-28T04:21:35.241Z" }, + { url = "https://files.pythonhosted.org/packages/1e/99/cd9e1acd84315ce05ad1fcdfabf73b7df43807cf00c3b781db372d92b899/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:dcc17bf59b85c3676818f2219deacac0156492f32ca165e048427d2d3e6e1157", size = 3300216, upload-time = "2025-06-28T04:21:36.826Z" }, + { url = "https://files.pythonhosted.org/packages/9f/c0/66eab57b14550c5b22404dbf60635c9e33efa003bd747211981a9859b94b/grpcio_tools-1.71.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:706360c71bdd722682927a1fb517c276ccb816f1e30cb71f33553e5817dc4031", size = 2913521, upload-time = "2025-06-28T04:21:38.347Z" }, + { url = "https://files.pythonhosted.org/packages/05/9b/7c90af8f937d77005625d705ab1160bc42a7e7b021ee5c788192763bccd6/grpcio_tools-1.71.2-cp313-cp313-win32.whl", hash = "sha256:bcf751d5a81c918c26adb2d6abcef71035c77d6eb9dd16afaf176ee096e22c1d", size = 945322, upload-time = "2025-06-28T04:21:39.864Z" }, + { url = "https://files.pythonhosted.org/packages/5f/80/6db6247f767c94fe551761772f89ceea355ff295fd4574cb8efc8b2d1199/grpcio_tools-1.71.2-cp313-cp313-win_amd64.whl", hash = "sha256:b1581a1133552aba96a730178bc44f6f1a071f0eb81c5b6bc4c0f89f5314e2b8", size = 1117234, upload-time = "2025-06-28T04:21:41.893Z" }, +] + [[package]] name = "grpclib" version = "0.4.7" @@ -1432,6 +1546,23 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d0/9e/984486f2d0a0bd2b024bf4bc1c62688fcafa9e61991f041fb0e2def4a982/h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0", size = 60957, upload-time = "2025-02-01T11:02:26.481Z" }, ] +[[package]] +name = "hatchet-sdk" +version = "1.19.1" +source = { git 
= "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai#ed3a64aa36f0e4c15da148023894422ed5b1eccd" } +dependencies = [ + { name = "aiohttp" }, + { name = "grpcio" }, + { name = "grpcio-tools" }, + { name = "prometheus-client" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-dateutil" }, + { name = "tenacity" }, + { name = "urllib3" }, +] + [[package]] name = "hf-xet" version = "1.1.3" @@ -2839,6 +2970,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0c/dd/f0183ed0145e58cf9d286c1b2c14f63ccee987a4ff79ac85acc31b5d86bd/primp-0.15.0-cp38-abi3-win_amd64.whl", hash = "sha256:aeb6bd20b06dfc92cfe4436939c18de88a58c640752cf7f30d9e4ae893cdec32", size = 3149967, upload-time = "2025-04-17T11:41:07.067Z" }, ] +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + [[package]] name = "prompt-toolkit" version = "3.0.50" @@ -2942,16 +3082,16 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.3" +version = "5.29.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/d1/e0a911544ca9993e0f17ce6d3cc0932752356c1b0a834397f28e63479344/protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620", 
size = 424945, upload-time = "2025-01-08T21:38:51.572Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/7a/1e38f3cafa022f477ca0f57a1f49962f21ad25850c3ca0acd3b9d0091518/protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888", size = 422708, upload-time = "2025-01-08T21:38:31.799Z" }, - { url = "https://files.pythonhosted.org/packages/61/fa/aae8e10512b83de633f2646506a6d835b151edf4b30d18d73afd01447253/protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a", size = 434508, upload-time = "2025-01-08T21:38:35.489Z" }, - { url = "https://files.pythonhosted.org/packages/dd/04/3eaedc2ba17a088961d0e3bd396eac764450f431621b58a04ce898acd126/protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e", size = 417825, upload-time = "2025-01-08T21:38:36.642Z" }, - { url = "https://files.pythonhosted.org/packages/4f/06/7c467744d23c3979ce250397e26d8ad8eeb2bea7b18ca12ad58313c1b8d5/protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84", size = 319573, upload-time = "2025-01-08T21:38:37.896Z" }, - { url = "https://files.pythonhosted.org/packages/a8/45/2ebbde52ad2be18d3675b6bee50e68cd73c9e0654de77d595540b5129df8/protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f", size = 319672, upload-time = "2025-01-08T21:38:40.204Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550, upload-time = "2025-01-08T21:38:50.439Z" }, + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = 
"sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, ] [[package]] @@ -3128,6 +3268,9 @@ dbos = [ examples = [ { name = "pydantic-ai-examples" }, ] +hatchet = [ + { name = "pydantic-ai-slim", extra = ["hatchet"] }, +] [package.dev-dependencies] dev = [ @@ -3178,8 +3321,9 @@ requires-dist = [ { name = "pydantic-ai-examples", marker = "extra == 'examples'", editable = "examples" }, { name = "pydantic-ai-slim", extras = ["ag-ui", "anthropic", "bedrock", "cli", "cohere", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "temporal", "vertexai"], editable = "pydantic_ai_slim" }, { name = "pydantic-ai-slim", extras = ["dbos"], marker = "extra == 'dbos'", editable = "pydantic_ai_slim" }, + { name = "pydantic-ai-slim", extras = ["hatchet"], marker = "extra == 'hatchet'", editable = "pydantic_ai_slim" }, ] -provides-extras = ["a2a", "dbos", "examples"] +provides-extras = ["a2a", "dbos", "examples", "hatchet"] [package.metadata.requires-dev] dev = [ @@ -3316,6 +3460,9 @@ google = [ groq = [ { name = "groq" }, ] +hatchet = [ + { name = "hatchet-sdk" }, +] huggingface = [ { name = "huggingface-hub", extra = ["inference"] }, ] @@ -3361,6 +3508,7 @@ requires-dist = [ { name = "google-genai", marker = "extra == 'google'", specifier = ">=1.31.0" }, { name = "griffe", specifier = ">=1.3.2" }, { name = "groq", marker = "extra == 'groq'", specifier = ">=0.25.0" }, + { name = "hatchet-sdk", marker = "extra == 'hatchet'", git = "https://github.com/hatchet-dev/hatchet.git?subdirectory=sdks%2Fpython&rev=mk%2Ftweaks-for-pydantic-ai" }, { name = "httpx", specifier = ">=0.27" }, { name = "huggingface-hub", extras = ["inference"], marker = "extra == 'huggingface'", specifier = ">=0.33.5" }, { name = "logfire", extras = ["httpx"], marker = "extra == 'logfire'", specifier = ">=3.14.1" }, @@ -3381,7 +3529,7 @@ requires-dist = [ { name = "tenacity", marker = "extra == 
'retries'", specifier = ">=8.2.3" }, { name = "typing-inspection", specifier = ">=0.4.0" }, ] -provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "tavily", "temporal", "vertexai"] +provides-extras = ["a2a", "ag-ui", "anthropic", "bedrock", "cli", "cohere", "dbos", "duckduckgo", "evals", "google", "groq", "hatchet", "huggingface", "logfire", "mcp", "mistral", "openai", "retries", "tavily", "temporal", "vertexai"] [[package]] name = "pydantic-core" @@ -4088,6 +4236,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6a/23/8146aad7d88f4fcb3a6218f41a60f6c2d4e3a72de72da1825dc7c8f7877c/semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177", size = 15552, upload-time = "2022-05-26T13:35:21.206Z" }, ] +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -4529,11 +4686,11 @@ wheels = [ [[package]] name = "urllib3" -version = "2.3.0" +version = "2.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = 
"sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268, upload-time = "2024-12-22T07:47:30.032Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369, upload-time = "2024-12-22T07:47:28.074Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] [[package]] From 92e60ffbf1af6f2690112f1612c887f306961404 Mon Sep 17 00:00:00 2001 From: mrkaye97 Date: Thu, 2 Oct 2025 15:12:07 -0400 Subject: [PATCH 42/42] fix: appease the type checker --- .../pydantic_ai/durable_exec/hatchet/_agent.py | 5 ++++- .../pydantic_ai/durable_exec/hatchet/_function_toolset.py | 2 +- .../pydantic_ai/durable_exec/hatchet/_mcp_server.py | 8 ++------ .../pydantic_ai/durable_exec/hatchet/_model.py | 3 ++- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py index 7ab150b755..ea626614f2 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_agent.py @@ -11,7 +11,8 @@ from typing_extensions import Never from pydantic_ai import _utils, messages as _messages, models, usage as _usage -from pydantic_ai.agent import AbstractAgent, AgentRun, 
AgentRunResult, EventStreamHandler, RunOutputDataT, WrapperAgent +from pydantic_ai.agent import AbstractAgent, AgentRun, AgentRunResult, EventStreamHandler, WrapperAgent +from pydantic_ai.agent.abstract import Instructions, RunOutputDataT from pydantic_ai.exceptions import UserError from pydantic_ai.messages import AgentStreamEvent from pydantic_ai.models import Model @@ -652,6 +653,7 @@ def override( model: models.Model | models.KnownModelName | str | _utils.Unset = _utils.UNSET, toolsets: Sequence[AbstractToolset[AgentDepsT]] | _utils.Unset = _utils.UNSET, tools: Sequence[Tool[AgentDepsT] | ToolFuncEither[AgentDepsT, ...]] | _utils.Unset = _utils.UNSET, + instructions: Instructions[AgentDepsT] | _utils.Unset = _utils.UNSET, ) -> Iterator[None]: """Context manager to temporarily override agent dependencies, model, toolsets, or tools. @@ -663,6 +665,7 @@ def override( model: The model to use instead of the model passed to the agent run. toolsets: The toolsets to use instead of the toolsets passed to the agent constructor and agent run. tools: The tools to use instead of the tools registered with the agent. + instructions: The instructions to use instead of the instructions registered with the agent. 
""" with super().override(deps=deps, model=model, toolsets=toolsets, tools=tools): yield diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py index c1c8de7b29..ede2ffafbd 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_function_toolset.py @@ -66,7 +66,7 @@ async def tool_task( _ctx: Context, ) -> ToolOutput: run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.deps, hatchet_context=_ctx + input.serialized_run_context, deps=input.deps ) tool = (await wrapped.get_tools(run_context))[current_tool_name] diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py index 0ad942a4e5..9306e8a6b4 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_mcp_server.py @@ -87,9 +87,7 @@ async def wrapped_get_tools_task( input: GetToolsInput[AgentDepsT], _ctx: Context, ) -> dict[str, ToolDefinition]: - run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.deps, hatchet_context=_ctx - ) + run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) # ToolsetTool is not serializable as it holds a SchemaValidator (which is also the same for every MCP tool so unnecessary to pass along the wire every time), # so we just return the ToolDefinitions and wrap them in ToolsetTool outside of the activity. 
@@ -120,9 +118,7 @@ async def wrapped_call_tool_task( input: CallToolInput[AgentDepsT], _ctx: Context, ) -> CallToolOutput[AgentDepsT]: - run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.deps, hatchet_context=_ctx - ) + run_context = self.run_context_type.deserialize_run_context(input.serialized_run_context, deps=input.deps) tool = self.tool_for_tool_def(input.tool_def) result = await super(HatchetMCPServer, self).call_tool(input.name, input.tool_args, run_context, tool) diff --git a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py index 7d797f0fc8..6875fdb5c9 100644 --- a/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py +++ b/pydantic_ai_slim/pydantic_ai/durable_exec/hatchet/_model.py @@ -118,7 +118,8 @@ async def wrapped_request_stream_task( assert self.event_stream_handler run_context = self.run_context_type.deserialize_run_context( - input.serialized_run_context, deps=input.serialized_run_context, hatchet_context=ctx + input.serialized_run_context, + deps=input.serialized_run_context, ) async with self.wrapped.request_stream(