From 5ccd08e8a5cdf47612e70b83a1c71a9bb6bf7124 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 28 Jan 2025 14:02:30 -0500 Subject: [PATCH 01/96] Copy of pydantic v1 converter from samples-python --- temporalio/contrib/pydantic/converter.py | 58 +++++++++++++++ tests/contrib/test_pydantic.py | 94 ++++++++++++++++++++++++ 2 files changed, 152 insertions(+) create mode 100644 temporalio/contrib/pydantic/converter.py create mode 100644 tests/contrib/test_pydantic.py diff --git a/temporalio/contrib/pydantic/converter.py b/temporalio/contrib/pydantic/converter.py new file mode 100644 index 000000000..81997e81b --- /dev/null +++ b/temporalio/contrib/pydantic/converter.py @@ -0,0 +1,58 @@ +import json +from typing import Any, Optional + +from pydantic.json import pydantic_encoder +from temporalio.api.common.v1 import Payload +from temporalio.converter import ( + CompositePayloadConverter, + DataConverter, + DefaultPayloadConverter, + JSONPlainPayloadConverter, +) + + +class PydanticJSONPayloadConverter(JSONPlainPayloadConverter): + """Pydantic JSON payload converter. + + This extends the :py:class:`JSONPlainPayloadConverter` to override + :py:meth:`to_payload` using the Pydantic encoder. + """ + + def to_payload(self, value: Any) -> Optional[Payload]: + """Convert all values with Pydantic encoder or fail. + + Like the base class, we fail if we cannot convert. This payload + converter is expected to be the last in the chain, so it can fail if + unable to convert. + """ + # We let JSON conversion errors be thrown to caller + return Payload( + metadata={"encoding": self.encoding.encode()}, + data=json.dumps( + value, separators=(",", ":"), sort_keys=True, default=pydantic_encoder + ).encode(), + ) + + +class PydanticPayloadConverter(CompositePayloadConverter): + """Payload converter that replaces Temporal JSON conversion with Pydantic + JSON conversion. 
+ """ + + def __init__(self) -> None: + super().__init__( + *( + ( + c + if not isinstance(c, JSONPlainPayloadConverter) + else PydanticJSONPayloadConverter() + ) + for c in DefaultPayloadConverter.default_encoding_payload_converters + ) + ) + + +pydantic_data_converter = DataConverter( + payload_converter_class=PydanticPayloadConverter +) +"""Data converter using Pydantic JSON conversion.""" diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py new file mode 100644 index 000000000..7873ddf44 --- /dev/null +++ b/tests/contrib/test_pydantic.py @@ -0,0 +1,94 @@ +import dataclasses +import uuid +from datetime import datetime, timedelta +from ipaddress import IPv4Address +from typing import List + +from pydantic import BaseModel + +from temporalio import activity, workflow +from temporalio.client import Client +from temporalio.contrib.pydantic.converter import pydantic_data_converter +from temporalio.worker import Worker +from temporalio.worker.workflow_sandbox import ( + SandboxedWorkflowRunner, + SandboxRestrictions, +) + + +class MyPydanticModel(BaseModel): + some_ip: IPv4Address + some_date: datetime + + +@activity.defn +async def my_activity(models: List[MyPydanticModel]) -> List[MyPydanticModel]: + activity.logger.info("Got models in activity: %s" % models) + return models + + +@workflow.defn +class MyWorkflow: + @workflow.run + async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: + workflow.logger.info("Got models in workflow: %s" % models) + return await workflow.execute_activity( + my_activity, models, start_to_close_timeout=timedelta(minutes=1) + ) + + +# Due to known issues with Pydantic's use of issubclass and our inability to +# override the check in sandbox, Pydantic will think datetime is actually date +# in the sandbox. At the expense of protecting against datetime.now() use in +# workflows, we're going to remove datetime module restrictions. See sdk-python +# README's discussion of known sandbox issues for more details. 
+def new_sandbox_runner() -> SandboxedWorkflowRunner: + # TODO(cretz): Use with_child_unrestricted when https://github.com/temporalio/sdk-python/issues/254 + # is fixed and released + invalid_module_member_children = dict( + SandboxRestrictions.invalid_module_members_default.children + ) + del invalid_module_member_children["datetime"] + return SandboxedWorkflowRunner( + restrictions=dataclasses.replace( + SandboxRestrictions.default, + invalid_module_members=dataclasses.replace( + SandboxRestrictions.invalid_module_members_default, + children=invalid_module_member_children, + ), + ) + ) + + +async def test_workflow_with_pydantic_model(client: Client): + # Replace data converter in client + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_models = [ + MyPydanticModel( + some_ip=IPv4Address("127.0.0.1"), + some_date=datetime(2000, 1, 2, 3, 4, 5), + ), + MyPydanticModel( + some_ip=IPv4Address("127.0.0.2"), + some_date=datetime(2001, 2, 3, 4, 5, 6), + ), + ] + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[MyWorkflow], + activities=[my_activity], + workflow_runner=new_sandbox_runner(), + ): + result = await client.execute_workflow( + MyWorkflow.run, + orig_models, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert orig_models == result From afa81844b71a40417eff9ef5866a51abf98f7789 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 2 Feb 2025 13:43:04 -0500 Subject: [PATCH 02/96] Cleanup --- temporalio/contrib/pydantic/converter.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/temporalio/contrib/pydantic/converter.py b/temporalio/contrib/pydantic/converter.py index 81997e81b..a3c1cee66 100644 --- a/temporalio/contrib/pydantic/converter.py +++ b/temporalio/contrib/pydantic/converter.py @@ -42,11 +42,9 @@ class PydanticPayloadConverter(CompositePayloadConverter): def __init__(self) -> None: super().__init__( *( - ( - c - if not isinstance(c, JSONPlainPayloadConverter) - else PydanticJSONPayloadConverter() - ) + c + if not isinstance(c, JSONPlainPayloadConverter) + else PydanticJSONPayloadConverter() for c in DefaultPayloadConverter.default_encoding_payload_converters ) ) From 5ccd203da119cddf01be0a64f798aaadfe97633c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 30 Jan 2025 12:40:43 -0500 Subject: [PATCH 03/96] update pydantic to v2 --- poetry.lock | 294 ++++++++++++++++++++++++++++++++++++++++--------- pyproject.toml | 3 +- 2 files changed, 243 insertions(+), 54 deletions(-) diff --git a/poetry.lock b/poetry.lock index efa403d5e..b48edfa5b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,17 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pydantic\"" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] [[package]] name = "attrs" @@ -6,6 +19,7 @@ version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, @@ -25,6 +39,7 @@ version = "24.8.1" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "Automat-24.8.1-py3-none-any.whl", hash = "sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a"}, {file = "automat-24.8.1.tar.gz", hash = "sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88"}, @@ -42,6 +57,7 @@ version = "0.16" description = "Python parser for bash" optional = false python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4" +groups = ["dev"] files = [ {file = "bashlex-0.16-py2.py3-none-any.whl", hash = "sha256:ff89fc743ccdef978792784d74d698a9236a862939bb4af471c0c3faf92c21bb"}, {file = "bashlex-0.16.tar.gz", hash = "sha256:dc6f017e49ce2d0fe30ad9f5206da9cd13ded073d365688c9fda525354e8c373"}, @@ -53,6 +69,7 @@ version = "5.0.1" description = "An easy safelist-based HTML-sanitizing tool." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "bleach-5.0.1-py3-none-any.whl", hash = "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a"}, {file = "bleach-5.0.1.tar.gz", hash = "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c"}, @@ -72,6 +89,7 @@ version = "2.3.post1" description = "Bash style brace expander." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "bracex-2.3.post1-py3-none-any.whl", hash = "sha256:351b7f20d56fb9ea91f9b9e9e7664db466eb234188c175fd943f8f755c807e73"}, {file = "bracex-2.3.post1.tar.gz", hash = "sha256:e7b23fc8b2cd06d3dec0692baabecb249dda94e06a617901ff03a6c56fd71693"}, @@ -83,6 +101,7 @@ version = "0.14.1" description = "httplib2 caching for requests" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cachecontrol-0.14.1-py3-none-any.whl", hash = "sha256:65e3abd62b06382ce3894df60dde9e0deb92aeb734724f68fa4f3b91e97206b9"}, {file = "cachecontrol-0.14.1.tar.gz", hash = "sha256:06ef916a1e4eb7dba9948cdfc9c76e749db2e02104a9a1277e8b642591a0f717"}, @@ -104,6 +123,7 @@ version = "2022.9.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "certifi-2022.9.14-py3-none-any.whl", hash = "sha256:e232343de1ab72c2aa521b625c80f699e356830fd0e2c620b465b304b17b0516"}, {file = "certifi-2022.9.14.tar.gz", hash = "sha256:36973885b9542e6bd01dea287b2b4b3b21236307c56324fcc3f1160f2d655ed5"}, @@ -115,6 +135,8 @@ version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform == \"linux\"" files = [ {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, @@ -191,6 +213,7 @@ version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.6.0" +groups = ["dev"] files = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, @@ -205,6 +228,7 @@ version = "2.22.0" description = "Build Python wheels on CI with minimal configuration." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cibuildwheel-2.22.0-py3-none-any.whl", hash = "sha256:c40bb7ac7b57fed8195fca624cc9bd68334375d32b75bea6fa8330ac1cd902c4"}, {file = "cibuildwheel-2.22.0.tar.gz", hash = "sha256:6651e775ac26a86a49d67639aa3540f19728caf0dfcd80f156ba4f241aad4940"}, @@ -230,6 +254,8 @@ version = "0.4.5" description = "Cross-platform colored terminal text." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] +markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, @@ -241,6 +267,7 @@ version = "0.9.1" description = "Python parser for the CommonMark Markdown spec" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -255,6 +282,7 @@ version = "1.5.3" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] files = [ {file = "ConfigArgParse-1.5.3-py3-none-any.whl", hash = "sha256:18f6535a2db9f6e02bd5626cc7455eac3e96b9ab3d969d366f9aafd5c5c00fe7"}, {file = "ConfigArgParse-1.5.3.tar.gz", hash = "sha256:1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f"}, @@ -270,6 +298,7 @@ version = "15.1.0" description = "Symbolic constants in Python" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, @@ -281,6 +310,8 @@ version = "38.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "sys_platform == \"linux\"" files = [ {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:10d1f29d6292fc95acb597bacefd5b9e812099d75a6469004fd38ba5471a977f"}, {file = "cryptography-38.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3fc26e22840b77326a764ceb5f02ca2d342305fba08f002a8c1f139540cdfaad"}, @@ -327,6 +358,7 @@ version = "1.3.0" description = "A tool for resolving PEP 735 Dependency Group data" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "dependency_groups-1.3.0-py3-none-any.whl", hash = "sha256:1abf34d712deda5581e80d507512664d52b35d1c2d7caf16c85e58ca508547e0"}, {file = "dependency_groups-1.3.0.tar.gz", hash = "sha256:5b9751d5d98fbd6dfd038a560a69c8382e41afcbf7ffdbcc28a2a3f85498830f"}, @@ -345,6 +377,8 @@ version = "1.2.13" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "extra == \"opentelemetry\"" files = [ {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, @@ -362,6 +396,7 @@ version = "0.19" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, @@ -373,6 +408,8 @@ version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, @@ -387,6 +424,7 @@ version = "3.8.0" description = "A platform independent file lock." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, @@ -402,6 +440,7 @@ version = "1.68.0" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"}, {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"}, @@ -459,6 +498,7 @@ files = [ {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"}, {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"}, ] +markers = {main = "extra == \"grpc\""} [package.extras] protobuf = ["grpcio-tools (>=1.68.0)"] @@ -469,6 +509,7 @@ version = "1.68.0" description = "Protobuf code generator for gRPC" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "grpcio_tools-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9509a5c3ed3d54fa7ac20748d501cb86668f764605a0a68f275339ee0f1dc1a6"}, {file = "grpcio_tools-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:59a885091bf29700ba0e14a954d156a18714caaa2006a7f328b18e1ac4b1e721"}, @@ -538,6 +579,7 @@ version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, @@ -552,6 +594,7 @@ version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -563,6 +606,7 @@ version = "4.12.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "importlib_metadata-4.12.0-py3-none-any.whl", hash = "sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"}, {file = "importlib_metadata-4.12.0.tar.gz", hash = "sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670"}, @@ -582,6 +626,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -600,6 +645,7 @@ version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -611,6 +657,7 @@ version = "3.2.2" description = "Utility functions for Python class constructs" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "jaraco.classes-3.2.2-py3-none-any.whl", hash = "sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647"}, {file = "jaraco.classes-3.2.2.tar.gz", hash = "sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594"}, @@ -629,6 +676,8 @@ version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "sys_platform == \"linux\"" files = [ {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, @@ -644,6 +693,7 @@ version = "23.9.1" description = "Store and access your passwords safely." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "keyring-23.9.1-py3-none-any.whl", hash = "sha256:3565b9e4ea004c96e158d2d332a49f466733d565bb24157a60fd2e49f41a0fd1"}, {file = "keyring-23.9.1.tar.gz", hash = "sha256:39e4f6572238d2615a82fcaa485e608b84b503cf080dc924c43bbbacb11c1c18"}, @@ -666,6 +716,7 @@ version = "0.6.2" description = "A Python implementation of Lunr.js" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "lunr-0.6.2-py2.py3-none-any.whl", hash = "sha256:6fbf619e07ff97c6198f9cae0fc32d69a0d49fb0708c2559efbede45670cba06"}, {file = "lunr-0.6.2.tar.gz", hash = "sha256:7983d965bd7baa78cbd4f5b934fc3ef3142c1b6089df6c6fecd66df5bff20921"}, @@ -680,6 +731,7 @@ version = "8.14.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "more-itertools-8.14.0.tar.gz", hash = "sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750"}, {file = "more_itertools-8.14.0-py3-none-any.whl", hash = "sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2"}, @@ -691,6 +743,7 @@ version = "1.0.4" description = "MessagePack serializer" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4ab251d229d10498e9a2f3b1e68ef64cb393394ec477e3370c457f9430ce9250"}, {file = "msgpack-1.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:112b0f93202d7c0fef0b7810d465fde23c746a2d482e1e2de2aafd2ce1492c88"}, @@ -752,6 +805,7 @@ version = "1.4.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, @@ -798,6 +852,7 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -809,6 +864,7 @@ version = "3.3.0" description = "Generate mypy stub files from protobuf specs" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mypy-protobuf-3.3.0.tar.gz", hash = "sha256:24f3b0aecb06656e983f58e07c732a90577b9d7af3e1066fc2b663bbf0370248"}, {file = "mypy_protobuf-3.3.0-py3-none-any.whl", hash = "sha256:15604f6943b16c05db646903261e3b3e775cf7f7990b7c37b03d043a907b650d"}, @@ -824,6 +880,7 @@ version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -835,6 +892,8 @@ version = "1.12.0" description = "OpenTelemetry Python API" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"opentelemetry\"" files = [ {file = "opentelemetry-api-1.12.0.tar.gz", hash = "sha256:740c2cf9aa75e76c208b3ee04b3b3b3721f58bbac8e97019174f07ec12cde7af"}, {file = "opentelemetry_api-1.12.0-py3-none-any.whl", hash = "sha256:2e1cef8ce175be6464f240422babfe1dfb581daec96f0daad5d0d0e951b38f7b"}, @@ -850,6 +909,8 @@ version = "1.12.0" description = "OpenTelemetry Python SDK" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"opentelemetry\"" files = [ {file = "opentelemetry-sdk-1.12.0.tar.gz", hash = "sha256:bf37830ca4f93d0910cf109749237c5cb4465e31a54dfad8400011e9822a2a14"}, {file = "opentelemetry_sdk-1.12.0-py3-none-any.whl", hash = "sha256:d13be09765441c0513a3de01b7a2f56a7da36d902f60bff7c97f338903a57c34"}, @@ -867,6 +928,8 @@ version = "0.33b0" description = "OpenTelemetry Semantic Conventions" optional = true python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"opentelemetry\"" files = [ {file = "opentelemetry-semantic-conventions-0.33b0.tar.gz", hash = "sha256:67d62461c87b683b958428ced79162ec4d567dabf30b050f270bbd01eff89ced"}, {file = "opentelemetry_semantic_conventions-0.33b0-py3-none-any.whl", hash = "sha256:56b67b3f8f49413cbfbbeb32e9cf7b4c7dfb27a83064d959733766376ba11bc7"}, @@ -878,6 +941,7 @@ version = "23.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, @@ -889,6 +953,7 @@ version = "1.8.3" description = "Query metadatdata from sdists / bdists / installed packages." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["dev"] files = [ {file = "pkginfo-1.8.3-py2.py3-none-any.whl", hash = "sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594"}, {file = "pkginfo-1.8.3.tar.gz", hash = "sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c"}, @@ -903,6 +968,7 @@ version = "2.5.2" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, @@ -918,6 +984,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -933,6 +1000,7 @@ version = "5.28.3" description = "" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "protobuf-5.28.3-cp310-abi3-win32.whl", hash = "sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24"}, {file = "protobuf-5.28.3-cp310-abi3-win_amd64.whl", hash = "sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868"}, @@ -953,6 +1021,7 @@ version = "5.9.3" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] files = [ {file = "psutil-5.9.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b4a247cd3feaae39bb6085fcebf35b3b8ecd9b022db796d89c8f05067ca28e71"}, {file = "psutil-5.9.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5fa88e3d5d0b480602553d362c4b33a63e0c40bfea7312a7bf78799e01e0810b"}, @@ -1001,6 +1070,8 @@ version = "2.21" description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] +markers = "sys_platform == \"linux\"" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1008,62 +1079,139 @@ files = [ [[package]] name = "pydantic" -version = "1.10.19" -description = "Data validation and settings management using python type hints" -optional = false -python-versions = ">=3.7" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pydantic\"" files = [ - {file = "pydantic-1.10.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a415b9e95fa602b10808113967f72b2da8722061265d6af69268c111c254832d"}, - {file = "pydantic-1.10.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:11965f421f7eb026439d4eb7464e9182fe6d69c3d4d416e464a4485d1ba61ab6"}, - {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bb81fcfc6d5bff62cd786cbd87480a11d23f16d5376ad2e057c02b3b44df96"}, - {file = 
"pydantic-1.10.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ee8c9916689f8e6e7d90161e6663ac876be2efd32f61fdcfa3a15e87d4e413"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0399094464ae7f28482de22383e667625e38e1516d6b213176df1acdd0c477ea"}, - {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b2cf5e26da84f2d2dee3f60a3f1782adedcee785567a19b68d0af7e1534bd1f"}, - {file = "pydantic-1.10.19-cp310-cp310-win_amd64.whl", hash = "sha256:1fc8cc264afaf47ae6a9bcbd36c018d0c6b89293835d7fb0e5e1a95898062d59"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3"}, - {file = "pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3"}, - {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98"}, - {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526"}, - {file = "pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890"}, - {file = "pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e"}, - {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186"}, - {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086"}, - {file = "pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e"}, - {file = "pydantic-1.10.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a5d5b877c7d3d9e17399571a8ab042081d22fe6904416a8b20f8af5909e6c8f"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c46f58ef2df958ed2ea7437a8be0897d5efe9ee480818405338c7da88186fb3"}, - {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d8a38a44bb6a15810084316ed69c854a7c06e0c99c5429f1d664ad52cec353c"}, - {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a82746c6d6e91ca17e75f7f333ed41d70fce93af520a8437821dec3ee52dfb10"}, - {file = 
"pydantic-1.10.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:566bebdbe6bc0ac593fa0f67d62febbad9f8be5433f686dc56401ba4aab034e3"}, - {file = "pydantic-1.10.19-cp37-cp37m-win_amd64.whl", hash = "sha256:22a1794e01591884741be56c6fba157c4e99dcc9244beb5a87bd4aa54b84ea8b"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:076c49e24b73d346c45f9282d00dbfc16eef7ae27c970583d499f11110d9e5b0"}, - {file = "pydantic-1.10.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d4320510682d5a6c88766b2a286d03b87bd3562bf8d78c73d63bab04b21e7b4"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e66aa0fa7f8aa9d0a620361834f6eb60d01d3e9cea23ca1a92cda99e6f61dac"}, - {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d216f8d0484d88ab72ab45d699ac669fe031275e3fa6553e3804e69485449fa0"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f28a81978e936136c44e6a70c65bde7548d87f3807260f73aeffbf76fb94c2f"}, - {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3449633c207ec3d2d672eedb3edbe753e29bd4e22d2e42a37a2c1406564c20f"}, - {file = "pydantic-1.10.19-cp38-cp38-win_amd64.whl", hash = "sha256:7ea24e8614f541d69ea72759ff635df0e612b7dc9d264d43f51364df310081a3"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:573254d844f3e64093f72fcd922561d9c5696821ff0900a0db989d8c06ab0c25"}, - {file = "pydantic-1.10.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff09600cebe957ecbb4a27496fe34c1d449e7957ed20a202d5029a71a8af2e35"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4739c206bfb6bb2bdc78dcd40bfcebb2361add4ceac6d170e741bb914e9eff0f"}, - {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfb5b378b78229119d66ced6adac2e933c67a0aa1d0a7adffbe432f3ec14ce4"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f31742c95e3f9443b8c6fa07c119623e61d76603be9c0d390bcf7e888acabcb"}, - {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6444368b651a14c2ce2fb22145e1496f7ab23cbdb978590d47c8d34a7bc0289"}, - {file = "pydantic-1.10.19-cp39-cp39-win_amd64.whl", hash = "sha256:945407f4d08cd12485757a281fca0e5b41408606228612f421aa4ea1b63a095d"}, - {file = "pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f"}, - {file = "pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] -typing-extensions = ">=4.2.0" +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" [package.extras] -dotenv = ["python-dotenv (>=0.10.4)"] -email = ["email-validator (>=1.0.3)"] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"pydantic\"" +files = [ + {file = 
"pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = 
"pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = 
"pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydocstyle" @@ -1071,6 +1219,7 @@ version = "6.3.0" description = "Python docstring style checker" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, @@ -1088,6 +1237,7 @@ version = "24.11.1" description = "API doc generator." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pydoctor-24.11.1-py3-none-any.whl", hash = "sha256:eb180c1a784380bd5fcfa2f3b190608eb8be6d8a3647f8e382ec78a53b5248d5"}, {file = "pydoctor-24.11.1.tar.gz", hash = "sha256:5e02aba4d15d3f7c4e1509aa9e2bf0abf1b5912721ac434b0cec0ec7909f4f83"}, @@ -1117,6 +1267,7 @@ version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, @@ -1131,6 +1282,7 @@ version = "1.1.377" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pyright-1.1.377-py3-none-any.whl", hash = "sha256:af0dd2b6b636c383a6569a083f8c5a8748ae4dcde5df7914b3f3f267e14dd162"}, {file = "pyright-1.1.377.tar.gz", hash = "sha256:aabc30fedce0ded34baa0c49b24f10e68f4bfc8f68ae7f3d175c4b0f256b4fcf"}, @@ -1149,6 +1301,7 @@ version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, @@ -1171,6 +1324,7 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -1189,6 +1343,7 @@ version = "2.2.0" description = "pytest plugin to abort hanging tests" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, @@ -1203,6 +1358,8 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +markers = "python_version < \"3.11\"" files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1217,6 +1374,8 @@ version = "0.2.0" description = "" optional = false python-versions = "*" +groups = ["dev"] +markers = "sys_platform == \"win32\"" files = [ {file = "pywin32-ctypes-0.2.0.tar.gz", hash = "sha256:24ffc3b341d457d48e8922352130cf2644024a4ff09762a2261fd34c36ee5942"}, {file = "pywin32_ctypes-0.2.0-py2.py3-none-any.whl", hash = "sha256:9dc2d991b3479cc2df15930958b674a48a227d5361d413827a4cfd0b5876fc98"}, @@ -1228,6 +1387,7 @@ version = "37.1" description = "readme_renderer is a library for rendering \"readme\" descriptions for Warehouse" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "readme_renderer-37.1-py3-none-any.whl", hash = "sha256:16c914ca7731fd062a316a2a8e5434a175ee34661a608af771a60c881f528a34"}, {file = "readme_renderer-37.1.tar.gz", hash = "sha256:96768c069729f69176f514477e57f2f8cd543fbb2cd7bad372976249fa509a0c"}, @@ -1247,6 +1407,7 @@ version = "2.32.3" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1268,6 +1429,7 @@ version = "0.9.1" description = "A utility belt for advanced users of python-requests" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, @@ -1282,6 +1444,7 @@ version = "2.0.0" description = "Validating URI References per RFC 3986" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, @@ -1296,6 +1459,7 @@ version = "12.5.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.6.3,<4.0.0" +groups = ["dev"] files = [ {file = "rich-12.5.1-py3-none-any.whl", hash = "sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb"}, {file = "rich-12.5.1.tar.gz", hash = "sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca"}, @@ -1314,6 +1478,7 @@ version = "0.5.0" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.5.0-py3-none-linux_armv6l.whl", hash = "sha256:ee770ea8ab38918f34e7560a597cc0a8c9a193aaa01bfbd879ef43cb06bd9c4c"}, {file = "ruff-0.5.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:38f3b8327b3cb43474559d435f5fa65dacf723351c159ed0dc567f7ab735d1b6"}, @@ -1341,6 +1506,8 @@ version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" optional = false python-versions = ">=3.6" +groups = ["dev"] +markers = "sys_platform == \"linux\"" files = [ {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, @@ -1356,6 +1523,7 @@ version = "2.10.0" description = "A library implementing the 'SemVer' scheme." 
optional = false python-versions = ">=2.7" +groups = ["dev"] files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, @@ -1371,6 +1539,7 @@ version = "68.2.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, @@ -1387,6 +1556,7 @@ version = "1.5.2" description = "Setuptools Rust extension plugin" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "setuptools-rust-1.5.2.tar.gz", hash = "sha256:d8daccb14dc0eae1b6b6eb3ecef79675bd37b4065369f79c35393dd5c55652c7"}, {file = "setuptools_rust-1.5.2-py3-none-any.whl", hash = "sha256:8eb45851e34288f2296cd5ab9e924535ac1757318b730a13fe6836867843f206"}, @@ -1403,10 +1573,12 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main", "dev"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +markers = {main = "python_version < \"3.11\""} [[package]] name = "snowballstemmer" @@ -1414,6 +1586,7 @@ version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -1425,6 +1598,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["dev"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -1436,6 +1610,8 @@ version = "2.0.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version < \"3.11\"" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, @@ -1447,6 +1623,7 @@ version = "4.0.1" description = "Collection of utilities for publishing packages on PyPI" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "twine-4.0.1-py3-none-any.whl", hash = "sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e"}, {file = "twine-4.0.1.tar.gz", hash = "sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0"}, @@ -1469,6 +1646,7 @@ version = "24.11.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "twisted-24.11.0-py3-none-any.whl", hash = "sha256:fe403076c71f04d5d2d789a755b687c5637ec3bcd3b2b8252d76f2ba65f54261"}, {file = "twisted-24.11.0.tar.gz", hash = "sha256:695d0556d5ec579dcc464d2856b634880ed1319f45b10d19043f2b57eb0115b5"}, @@ -1504,6 +1682,7 @@ version = "4.21.0.6" description = "Typing stubs for protobuf" optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "types-protobuf-4.21.0.6.tar.gz", hash = "sha256:8c105b906569e9d53ba033465880d9ef17a59bf3ba8ab656d24c9eadb9d8a056"}, {file = "types_protobuf-4.21.0.6-py3-none-any.whl", hash = "sha256:39167012ead0bc5920b6322a1e4dc2d088f66a34b84cce39bb88500e49ac955a"}, @@ -1515,6 +1694,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -1526,6 +1706,7 @@ version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, @@ -1543,6 +1724,7 @@ version = "0.5.1" description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -1554,6 +1736,7 @@ version = "0.42.0" description = "A built-package format for Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, @@ -1568,6 +1751,8 @@ version = "1.14.1" description = "Module for decorators, wrappers and monkey patching." optional = true python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +groups = ["main"] +markers = "extra == \"opentelemetry\"" files = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, @@ -1651,6 +1836,7 @@ version = "3.8.1" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, @@ -1666,6 +1852,7 @@ version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, @@ -1717,8 +1904,9 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] grpc = ["grpcio"] opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] +pydantic = ["pydantic"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.9" -content-hash = "63c0f8bf444280248b4d756f5baadd98b663968b8ec72a92150e0009000f0638" +content-hash = "595afb046373e5c826930cf0f1bc112f70cbfae14c72775c33221e96d66fc869" diff --git a/pyproject.toml b/pyproject.toml index ee3b6cec3..740a03850 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ grpcio = {version = "^1.48.2", optional = true} opentelemetry-api = { version = "^1.11.1", optional = true } opentelemetry-sdk = { version = "^1.11.1", optional = true } protobuf = ">=3.20" +pydantic = { version = "^2.10.6", optional = true } python = "^3.9" python-dateutil = { version = "^2.8.2", python = "<3.11" } types-protobuf = ">=3.20" @@ -46,7 +47,6 @@ grpcio-tools = "^1.48.2" mypy = "^1.0.0" mypy-protobuf = "^3.3.0" psutil = 
"^5.9.3" -pydantic = "^1.10.19" pydocstyle = "^6.3.0" pydoctor = "^24.11.1" pyright = ">=1.1.377" @@ -63,6 +63,7 @@ wheel = "^0.42.0" [tool.poetry.extras] opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] grpc = ["grpcio"] +pydantic = ["pydantic"] [tool.poetry.group.dev.dependencies] ruff = "^0.5.0" From e8b6f013f505a948af9b12a57de082de60d094dd Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 31 Jan 2025 08:13:49 -0500 Subject: [PATCH 04/96] Drive-by: use zip_longest --- temporalio/converter.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index 61037dc3b..f20e8543f 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -15,6 +15,7 @@ from dataclasses import dataclass from datetime import datetime from enum import IntEnum +from itertools import zip_longest from typing import ( Any, Awaitable, @@ -291,10 +292,8 @@ def from_payloads( RuntimeError: Error during decode """ values = [] - for index, payload in enumerate(payloads): - type_hint = None - if type_hints and len(type_hints) > index: - type_hint = type_hints[index] + type_hints = type_hints or [] + for index, (payload, type_hint) in enumerate(zip_longest(payloads, type_hints)): # Raw value should just wrap if type_hint == temporalio.common.RawValue: values.append(temporalio.common.RawValue(payload)) From 33b79804dd9c6c6ae8470c6394c5743e8a0a6962 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 31 Jan 2025 08:18:27 -0500 Subject: [PATCH 05/96] Get rid of pydantic sandbox hack --- tests/contrib/test_pydantic.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 7873ddf44..e9fe69539 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,4 +1,3 @@ -import dataclasses import uuid from datetime import datetime, timedelta from ipaddress import IPv4Address @@ -10,10 +9,6 @@ from temporalio.client import Client from temporalio.contrib.pydantic.converter import pydantic_data_converter from temporalio.worker import Worker -from temporalio.worker.workflow_sandbox import ( - SandboxedWorkflowRunner, - SandboxRestrictions, -) class MyPydanticModel(BaseModel): @@ -37,29 +32,6 @@ async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: ) -# Due to known issues with Pydantic's use of issubclass and our inability to -# override the check in sandbox, Pydantic will think datetime is actually date -# in the sandbox. At the expense of protecting against datetime.now() use in -# workflows, we're going to remove datetime module restrictions. See sdk-python -# README's discussion of known sandbox issues for more details. 
-def new_sandbox_runner() -> SandboxedWorkflowRunner: - # TODO(cretz): Use with_child_unrestricted when https://github.com/temporalio/sdk-python/issues/254 - # is fixed and released - invalid_module_member_children = dict( - SandboxRestrictions.invalid_module_members_default.children - ) - del invalid_module_member_children["datetime"] - return SandboxedWorkflowRunner( - restrictions=dataclasses.replace( - SandboxRestrictions.default, - invalid_module_members=dataclasses.replace( - SandboxRestrictions.invalid_module_members_default, - children=invalid_module_member_children, - ), - ) - ) - - async def test_workflow_with_pydantic_model(client: Client): # Replace data converter in client new_config = client.config() @@ -83,7 +55,6 @@ async def test_workflow_with_pydantic_model(client: Client): task_queue=task_queue_name, workflows=[MyWorkflow], activities=[my_activity], - workflow_runner=new_sandbox_runner(), ): result = await client.execute_workflow( MyWorkflow.run, From 7af32c1fc16b3dce8676caf9512545e14bde0856 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 2 Feb 2025 20:55:13 -0500 Subject: [PATCH 06/96] Expand test coverage --- tests/contrib/test_pydantic.py | 61 +++++++++++++++++++++++++++++----- 1 file changed, 53 insertions(+), 8 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index e9fe69539..72d6761b9 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,19 +1,38 @@ import uuid from datetime import datetime, timedelta from ipaddress import IPv4Address -from typing import List +from typing import Annotated, Any, List, Sequence, TypeVar -from pydantic import BaseModel +from annotated_types import Len +from pydantic import BaseModel, Field, WithJsonSchema from temporalio import activity, workflow from temporalio.client import Client from temporalio.contrib.pydantic.converter import pydantic_data_converter from temporalio.worker import Worker +SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) +ShortSequence = Annotated[SequenceType, Len(max_length=2)] + class MyPydanticModel(BaseModel): - some_ip: IPv4Address - some_date: datetime + ip_field: IPv4Address + datetime_field: datetime + string_field_assigned_field: str = Field() + datetime_field_assigned_field: datetime = Field() + string_field_with_default: str = Field(default_factory=lambda: "my-string") + datetime_field_with_default: datetime = Field( + default_factory=lambda: datetime(2000, 1, 2, 3, 4, 5) + ) + annotated_datetime: Annotated[datetime, Field(), WithJsonSchema({"extra": "data"})] + annotated_list_of_str: Annotated[ + List[str], Field(), WithJsonSchema({"extra": "data"}) + ] + annotated_list_of_datetime: Annotated[ + List[datetime], Field(), WithJsonSchema({"extra": "data"}) + ] + str_short_sequence: ShortSequence[List[str]] + datetime_short_sequence: ShortSequence[List[datetime]] @activity.defn @@ -41,12 +60,38 @@ async def test_workflow_with_pydantic_model(client: Client): orig_models = [ MyPydanticModel( - some_ip=IPv4Address("127.0.0.1"), - some_date=datetime(2000, 1, 2, 3, 4, 5), + ip_field=IPv4Address("127.0.0.1"), + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + string_field_assigned_field="my-string", + datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), + annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), + annotated_list_of_str=["my-string-1", "my-string-2"], + annotated_list_of_datetime=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ], + str_short_sequence=["my-string-1", "my-string-2"], 
+ datetime_short_sequence=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ], ), MyPydanticModel( - some_ip=IPv4Address("127.0.0.2"), - some_date=datetime(2001, 2, 3, 4, 5, 6), + ip_field=IPv4Address("127.0.0.2"), + datetime_field=datetime(2001, 2, 3, 4, 5, 6), + string_field_assigned_field="my-string", + datetime_field_assigned_field=datetime(2000, 2, 3, 4, 5, 6), + annotated_datetime=datetime(2001, 2, 3, 4, 5, 6), + annotated_list_of_str=["my-string-3", "my-string-4"], + annotated_list_of_datetime=[ + datetime(2001, 2, 3, 4, 5, 6), + datetime(2001, 12, 13, 14, 15, 16), + ], + str_short_sequence=["my-string-3", "my-string-4"], + datetime_short_sequence=[ + datetime(2001, 2, 3, 4, 5, 6), + datetime(2001, 12, 13, 14, 15, 16), + ], ), ] From f07137301ec009bcd6541116d23f2a05073d123c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 2 Feb 2025 20:55:33 -0500 Subject: [PATCH 07/96] Implement type converter and __get_pydantic_core_schema__ --- temporalio/contrib/pydantic/converter.py | 54 +++++++++++++++++-- temporalio/converter.py | 15 +----- .../worker/workflow_sandbox/_restrictions.py | 13 +++++ 3 files changed, 64 insertions(+), 18 deletions(-) diff --git a/temporalio/contrib/pydantic/converter.py b/temporalio/contrib/pydantic/converter.py index a3c1cee66..01a5f9fef 100644 --- a/temporalio/contrib/pydantic/converter.py +++ b/temporalio/contrib/pydantic/converter.py @@ -1,23 +1,37 @@ +import inspect import json -from typing import Any, Optional +from typing import ( + Any, + Optional, + Type, +) +import pydantic from pydantic.json import pydantic_encoder + +import temporalio.workflow from temporalio.api.common.v1 import Payload from temporalio.converter import ( CompositePayloadConverter, DataConverter, DefaultPayloadConverter, JSONPlainPayloadConverter, + JSONTypeConverter, ) +from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext class PydanticJSONPayloadConverter(JSONPlainPayloadConverter): """Pydantic JSON payload converter. - This extends the :py:class:`JSONPlainPayloadConverter` to override - :py:meth:`to_payload` using the Pydantic encoder. + Extends :py:class:`JSONPlainPayloadConverter` to override :py:meth:`to_payload` using + the Pydantic encoder. :py:meth:`from_payload` uses the parent implementation, with a + custom type converter. """ + def __init__(self) -> None: + super().__init__(custom_type_converters=[PydanticModelTypeConverter()]) + def to_payload(self, value: Any) -> Optional[Payload]: """Convert all values with Pydantic encoder or fail. @@ -25,7 +39,7 @@ def to_payload(self, value: Any) -> Optional[Payload]: converter is expected to be the last in the chain, so it can fail if unable to convert. 
""" - # We let JSON conversion errors be thrown to caller + # Let JSON conversion errors be thrown to caller return Payload( metadata={"encoding": self.encoding.encode()}, data=json.dumps( @@ -34,17 +48,47 @@ def to_payload(self, value: Any) -> Optional[Payload]: ) +class PydanticModelTypeConverter(JSONTypeConverter): + def to_typed_value(self, hint: Type, value: Any) -> Any: + if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): + return JSONTypeConverter.Unhandled + model = hint + if not isinstance(value, dict): + raise TypeError( + f"Cannot convert to {model}, value is {type(value)} not dict" + ) + if temporalio.workflow.unsafe.in_sandbox(): + # Unwrap proxied model field types so that Pydantic can call their constructors + model = pydantic.create_model( + model.__name__, + **{ # type: ignore + name: (RestrictionContext.unwrap_if_proxied(f.annotation), f) + for name, f in model.model_fields.items() + }, + ) + if hasattr(model, "model_validate"): + return model.model_validate(value) + elif hasattr(model, "parse_obj"): + # Pydantic v1 + return model.parse_obj(value) + else: + raise ValueError( + f"{model} is a Pydantic model but does not have a `model_validate` or `parse_obj` method" + ) + + class PydanticPayloadConverter(CompositePayloadConverter): """Payload converter that replaces Temporal JSON conversion with Pydantic JSON conversion. """ def __init__(self) -> None: + json_payload_converter = PydanticJSONPayloadConverter() super().__init__( *( c if not isinstance(c, JSONPlainPayloadConverter) - else PydanticJSONPayloadConverter() + else json_payload_converter for c in DefaultPayloadConverter.default_encoding_payload_converters ) ) diff --git a/temporalio/converter.py b/temporalio/converter.py index f20e8543f..0c07c4024 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -558,9 +558,10 @@ def encoding(self) -> str: def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" # Check for pydantic then send warning + # TODO (dan): update if hasattr(value, "parse_obj"): warnings.warn( - "If you're using pydantic model, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter for better support" + "If you're using a pydantic model, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter for better support" ) # We let JSON conversion errors be thrown to caller return temporalio.api.common.v1.Payload( @@ -1522,18 +1523,6 @@ def value_to_type( # TODO(cretz): Want way to convert snake case to camel case? return hint(**field_values) - # If there is a @staticmethod or @classmethod parse_obj, we will use it. - # This covers Pydantic models. 
- parse_obj_attr = inspect.getattr_static(hint, "parse_obj", None) - if isinstance(parse_obj_attr, classmethod) or isinstance( - parse_obj_attr, staticmethod - ): - if not isinstance(value, dict): - raise TypeError( - f"Cannot convert to {hint}, value is {type(value)} not dict" - ) - return getattr(hint, "parse_obj")(value) - # IntEnum if inspect.isclass(hint) and issubclass(hint, IntEnum): if not isinstance(value, int): diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 407e51b27..73e439883 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -31,6 +31,9 @@ cast, ) +from pydantic import GetCoreSchemaHandler +from pydantic_core import CoreSchema, core_schema + import temporalio.workflow logger = logging.getLogger(__name__) @@ -948,6 +951,14 @@ def _is_restrictable(v: Any) -> bool: class _RestrictedProxy: + @classmethod + def __get_pydantic_core_schema__( + cls, source_type: Any, handler: GetCoreSchemaHandler + ) -> CoreSchema: + return core_schema.no_info_after_validator_function( + cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) + ) + def __init__(self, *args, **kwargs) -> None: # When we instantiate this class, we have the signature of: # __init__( @@ -971,6 +982,8 @@ def __init__(self, *args, **kwargs) -> None: _trace("__init__ unrecognized with args %s", args) def __getattribute__(self, __name: str) -> Any: + if __name == "__get_pydantic_core_schema__": + return object.__getattribute__(self, "__get_pydantic_core_schema__") state = _RestrictionState.from_proxy(self) _trace("__getattribute__ %s on %s", __name, state.name) # We do not restrict __spec__ or __name__ From 32b74da9496b1f7832efca950d0069927206e398 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 10:30:43 -0500 Subject: [PATCH 08/96] Rename --- .../{pydantic/converter.py => pydantic.py} | 89 ++++++++++++------- tests/contrib/test_pydantic.py | 2 +- 2 files changed, 57 insertions(+), 34 deletions(-) rename temporalio/contrib/{pydantic/converter.py => pydantic.py} (65%) diff --git a/temporalio/contrib/pydantic/converter.py b/temporalio/contrib/pydantic.py similarity index 65% rename from temporalio/contrib/pydantic/converter.py rename to temporalio/contrib/pydantic.py index 01a5f9fef..20002c868 100644 --- a/temporalio/contrib/pydantic/converter.py +++ b/temporalio/contrib/pydantic.py @@ -1,3 +1,16 @@ +"""A data converter for Pydantic models + +To use, pass ``pydantic_data_converter`` as the ``data_converter`` argument to +:py:class:`temporalio.client.Client`: + +.. code-block:: python + + client = Client( + data_converter=pydantic_data_converter, + ... + ) +""" + import inspect import json from typing import ( @@ -21,34 +34,7 @@ from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext -class PydanticJSONPayloadConverter(JSONPlainPayloadConverter): - """Pydantic JSON payload converter. - - Extends :py:class:`JSONPlainPayloadConverter` to override :py:meth:`to_payload` using - the Pydantic encoder. :py:meth:`from_payload` uses the parent implementation, with a - custom type converter. - """ - - def __init__(self) -> None: - super().__init__(custom_type_converters=[PydanticModelTypeConverter()]) - - def to_payload(self, value: Any) -> Optional[Payload]: - """Convert all values with Pydantic encoder or fail. - - Like the base class, we fail if we cannot convert. 
This payload - converter is expected to be the last in the chain, so it can fail if - unable to convert. - """ - # Let JSON conversion errors be thrown to caller - return Payload( - metadata={"encoding": self.encoding.encode()}, - data=json.dumps( - value, separators=(",", ":"), sort_keys=True, default=pydantic_encoder - ).encode(), - ) - - -class PydanticModelTypeConverter(JSONTypeConverter): +class _PydanticModelTypeConverter(JSONTypeConverter): def to_typed_value(self, hint: Type, value: Any) -> Any: if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): return JSONTypeConverter.Unhandled @@ -77,13 +63,47 @@ def to_typed_value(self, hint: Type, value: Any) -> Any: ) -class PydanticPayloadConverter(CompositePayloadConverter): - """Payload converter that replaces Temporal JSON conversion with Pydantic +class _PydanticJSONPayloadConverter(JSONPlainPayloadConverter): + """Pydantic JSON payload converter. + + Conversion to JSON is implemented by overriding :py:meth:`to_payload` to use the + Pydantic encoder. + + Conversion from JSON uses the parent implementation of :py:meth:`from_payload`, with a + custom type converter. The parent implementation of :py:meth:`from_payload` traverses + the JSON document according to the structure specified by the type annotation; the + custom type converter ensures that, during this traversal, Pydantic model instances + will be created as specified by the type annotation. + """ + + def __init__(self) -> None: + super().__init__(custom_type_converters=[_PydanticModelTypeConverter()]) + + def to_payload(self, value: Any) -> Optional[Payload]: + """Convert all values with Pydantic encoder or fail. + + Like the base class, we fail if we cannot convert. This payload + converter is expected to be the last in the chain, so it can fail if + unable to convert. + """ + # Let JSON conversion errors be thrown to caller + return Payload( + metadata={"encoding": self.encoding.encode()}, + data=json.dumps( + value, separators=(",", ":"), sort_keys=True, default=pydantic_encoder + ).encode(), + ) + + +class _PydanticPayloadConverter(CompositePayloadConverter): + """Pydantic payload converter. + + Payload converter that replaces the default JSON conversion with Pydantic JSON conversion. """ def __init__(self) -> None: - json_payload_converter = PydanticJSONPayloadConverter() + json_payload_converter = _PydanticJSONPayloadConverter() super().__init__( *( c @@ -95,6 +115,9 @@ def __init__(self) -> None: pydantic_data_converter = DataConverter( - payload_converter_class=PydanticPayloadConverter + payload_converter_class=_PydanticPayloadConverter ) -"""Data converter using Pydantic JSON conversion.""" +"""Data converter for Pydantic models. 
+ +To use, pass this as the ``data_converter`` argument to :py:class:`temporalio.client.Client` +""" diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 72d6761b9..139749967 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -8,7 +8,7 @@ from temporalio import activity, workflow from temporalio.client import Client -from temporalio.contrib.pydantic.converter import pydantic_data_converter +from temporalio.contrib.pydantic import pydantic_data_converter from temporalio.worker import Worker SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) From 24f89de2c7fc63e76197589f6bee997344868b6c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 10:36:32 -0500 Subject: [PATCH 09/96] Clean up --- temporalio/converter.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index 0c07c4024..1463eeba2 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -10,7 +10,6 @@ import sys import traceback import uuid -import warnings from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime @@ -557,13 +556,7 @@ def encoding(self) -> str: def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" - # Check for pydantic then send warning - # TODO (dan): update - if hasattr(value, "parse_obj"): - warnings.warn( - "If you're using a pydantic model, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter for better support" - ) - # We let JSON conversion errors be thrown to caller + # Let JSON conversion errors be thrown to caller return temporalio.api.common.v1.Payload( metadata={"encoding": self._encoding.encode()}, data=json.dumps( From cc1d8f51653bf3f3e37a75d5019eaa2f646ed389 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 10:36:53 -0500 Subject: [PATCH 10/96] Extend test coverage --- tests/contrib/test_pydantic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 139749967..e28654b8e 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -51,8 +51,7 @@ async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: ) -async def test_workflow_with_pydantic_model(client: Client): - # Replace data converter in client +async def test_field_conversion(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) From adea382d7f7cbfc3feda6129d845c82d4a623cb3 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 10:57:42 -0500 Subject: [PATCH 11/96] Add test of mixed type inputs --- tests/contrib/test_pydantic.py | 119 ++++++++++++++++++++++++--------- 1 file changed, 89 insertions(+), 30 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index e28654b8e..f918ab384 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,7 +1,8 @@ +import dataclasses import uuid from datetime import datetime, timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, TypeVar +from typing import Annotated, Any, List, Sequence, Tuple, TypeVar from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -35,29 +36,8 @@ class MyPydanticModel(BaseModel): datetime_short_sequence: ShortSequence[List[datetime]] 
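(Editor's note, not part of the patch: the `ShortSequence` alias used by the test model above relies on pydantic v2 honoring `annotated_types.Len` metadata. Roughly, it behaves like this standalone sketch — the `ShortStrList` and `Holder` names are illustrative, not part of the test suite:)

```python
from typing import Annotated, List

from annotated_types import Len
from pydantic import BaseModel, ValidationError

ShortStrList = Annotated[List[str], Len(max_length=2)]


class Holder(BaseModel):
    items: ShortStrList


Holder(items=["a", "b"])  # within the max_length=2 constraint
try:
    Holder(items=["a", "b", "c"])  # one element too many
except ValidationError:
    pass
```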
-@activity.defn -async def my_activity(models: List[MyPydanticModel]) -> List[MyPydanticModel]: - activity.logger.info("Got models in activity: %s" % models) - return models - - -@workflow.defn -class MyWorkflow: - @workflow.run - async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: - workflow.logger.info("Got models in workflow: %s" % models) - return await workflow.execute_activity( - my_activity, models, start_to_close_timeout=timedelta(minutes=1) - ) - - -async def test_field_conversion(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_models = [ +def make_pydantic_objects() -> List[MyPydanticModel]: + return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), datetime_field=datetime(2000, 1, 2, 3, 4, 5), @@ -94,16 +74,95 @@ async def test_field_conversion(client: Client): ), ] + +@activity.defn +async def list_of_pydantic_models_activity( + models: List[MyPydanticModel], +) -> List[MyPydanticModel]: + return models + + +@workflow.defn +class ListOfPydanticObjectsWorkflow: + @workflow.run + async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: + return await workflow.execute_activity( + list_of_pydantic_models_activity, + models, + start_to_close_timeout=timedelta(minutes=1), + ) + + +async def test_field_conversion(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_pydantic_objects = make_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[ListOfPydanticObjectsWorkflow], + activities=[list_of_pydantic_models_activity], + ): + round_tripped_pydantic_objects = await client.execute_workflow( + ListOfPydanticObjectsWorkflow.run, + orig_pydantic_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert orig_pydantic_objects == round_tripped_pydantic_objects + + +@dataclasses.dataclass +class MyDataClass: + int_field: int + + +def make_dataclass_objects() -> List[MyDataClass]: + return [MyDataClass(int_field=7)] + + +@workflow.defn +class MixedCollectionTypesWorkflow: + @workflow.run + async def run( + self, input: Tuple[List[MyDataClass], List[MyPydanticModel]] + ) -> Tuple[List[MyDataClass], List[MyPydanticModel]]: + data_classes, pydantic_objects = input + pydantic_objects = await workflow.execute_activity( + list_of_pydantic_models_activity, + pydantic_objects, + start_to_close_timeout=timedelta(minutes=1), + ) + return data_classes, pydantic_objects + + +async def test_mixed_collection_types(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_dataclass_objects = make_dataclass_objects() + orig_pydantic_objects = make_pydantic_objects() + async with Worker( client, task_queue=task_queue_name, - workflows=[MyWorkflow], - activities=[my_activity], + workflows=[MixedCollectionTypesWorkflow], + activities=[list_of_pydantic_models_activity], ): - result = await client.execute_workflow( - MyWorkflow.run, - orig_models, + ( + round_tripped_dataclass_objects, + round_tripped_pydantic_objects, + ) = await client.execute_workflow( + MixedCollectionTypesWorkflow.run, + (orig_dataclass_objects, orig_pydantic_objects), id=str(uuid.uuid4()), task_queue=task_queue_name, ) - assert orig_models == result 
+ assert orig_dataclass_objects == round_tripped_dataclass_objects + assert orig_pydantic_objects == round_tripped_pydantic_objects From 0a9e3f699f541b1786bdb3b4036be09ac267f94c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 16:30:51 -0500 Subject: [PATCH 12/96] Use v2 API to_jsonable_python --- temporalio/contrib/pydantic.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 20002c868..3d84b399c 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -20,7 +20,6 @@ ) import pydantic -from pydantic.json import pydantic_encoder import temporalio.workflow from temporalio.api.common.v1 import Payload @@ -33,6 +32,11 @@ ) from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext +try: + from pydantic_core import to_jsonable_python +except ImportError: + from pydantic.json import pydantic_encoder as to_jsonable_python + class _PydanticModelTypeConverter(JSONTypeConverter): def to_typed_value(self, hint: Type, value: Any) -> Any: @@ -90,7 +94,7 @@ def to_payload(self, value: Any) -> Optional[Payload]: return Payload( metadata={"encoding": self.encoding.encode()}, data=json.dumps( - value, separators=(",", ":"), sort_keys=True, default=pydantic_encoder + value, separators=(",", ":"), sort_keys=True, default=to_jsonable_python ).encode(), ) From 3c0a857f695c7f920a621cb1dbea17a9ef2ea79a Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 20:44:23 -0500 Subject: [PATCH 13/96] Use JSONEncoder --- temporalio/contrib/pydantic.py | 63 +++++++++++----------------------- 1 file changed, 20 insertions(+), 43 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 3d84b399c..d329bcd0b 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -12,18 +12,22 @@ """ import inspect -import json from typing import ( Any, - Optional, Type, ) import pydantic +try: + from pydantic_core import to_jsonable_python +except ImportError: + # pydantic v1 + from pydantic.json import pydantic_encoder as to_jsonable_python + import temporalio.workflow -from temporalio.api.common.v1 import Payload from temporalio.converter import ( + AdvancedJSONEncoder, CompositePayloadConverter, DataConverter, DefaultPayloadConverter, @@ -32,13 +36,8 @@ ) from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext -try: - from pydantic_core import to_jsonable_python -except ImportError: - from pydantic.json import pydantic_encoder as to_jsonable_python - -class _PydanticModelTypeConverter(JSONTypeConverter): +class PydanticModelTypeConverter(JSONTypeConverter): def to_typed_value(self, hint: Type, value: Any) -> Any: if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): return JSONTypeConverter.Unhandled @@ -59,7 +58,7 @@ def to_typed_value(self, hint: Type, value: Any) -> Any: if hasattr(model, "model_validate"): return model.model_validate(value) elif hasattr(model, "parse_obj"): - # Pydantic v1 + # pydantic v1 return model.parse_obj(value) else: raise ValueError( @@ -67,39 +66,14 @@ def to_typed_value(self, hint: Type, value: Any) -> Any: ) -class _PydanticJSONPayloadConverter(JSONPlainPayloadConverter): - """Pydantic JSON payload converter. - - Conversion to JSON is implemented by overriding :py:meth:`to_payload` to use the - Pydantic encoder. - - Conversion from JSON uses the parent implementation of :py:meth:`from_payload`, with a - custom type converter. 
The parent implementation of :py:meth:`from_payload` traverses - the JSON document according to the structure specified by the type annotation; the - custom type converter ensures that, during this traversal, Pydantic model instances - will be created as specified by the type annotation. - """ - - def __init__(self) -> None: - super().__init__(custom_type_converters=[_PydanticModelTypeConverter()]) - - def to_payload(self, value: Any) -> Optional[Payload]: - """Convert all values with Pydantic encoder or fail. - - Like the base class, we fail if we cannot convert. This payload - converter is expected to be the last in the chain, so it can fail if - unable to convert. - """ - # Let JSON conversion errors be thrown to caller - return Payload( - metadata={"encoding": self.encoding.encode()}, - data=json.dumps( - value, separators=(",", ":"), sort_keys=True, default=to_jsonable_python - ).encode(), - ) +class PydanticJSONEncoder(AdvancedJSONEncoder): + def default(self, o: Any) -> Any: + if isinstance(o, pydantic.BaseModel): + return to_jsonable_python(o) + return super().default(o) -class _PydanticPayloadConverter(CompositePayloadConverter): +class PydanticPayloadConverter(CompositePayloadConverter): """Pydantic payload converter. Payload converter that replaces the default JSON conversion with Pydantic @@ -107,7 +81,10 @@ class _PydanticPayloadConverter(CompositePayloadConverter): """ def __init__(self) -> None: - json_payload_converter = _PydanticJSONPayloadConverter() + json_payload_converter = JSONPlainPayloadConverter( + encoder=PydanticJSONEncoder, + custom_type_converters=[PydanticModelTypeConverter()], + ) super().__init__( *( c @@ -119,7 +96,7 @@ def __init__(self) -> None: pydantic_data_converter = DataConverter( - payload_converter_class=_PydanticPayloadConverter + payload_converter_class=PydanticPayloadConverter ) """Data converter for Pydantic models. From b85354914ff8382680f2ba455de112a2e25bccc1 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 20:58:09 -0500 Subject: [PATCH 14/96] Cleanup --- temporalio/contrib/pydantic.py | 28 +++++++++++++++---- .../worker/workflow_sandbox/_restrictions.py | 17 +++++------ 2 files changed, 31 insertions(+), 14 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index d329bcd0b..eb307c7fc 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -23,7 +23,7 @@ from pydantic_core import to_jsonable_python except ImportError: # pydantic v1 - from pydantic.json import pydantic_encoder as to_jsonable_python + from pydantic.json import pydantic_encoder as to_jsonable_python # type: ignore import temporalio.workflow from temporalio.converter import ( @@ -36,9 +36,16 @@ ) from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext +# Note that in addition to the implementation in this module, _RestrictedProxy +# implements __get_pydantic_core_schema__ so that pydantic unwraps proxied types +# when determining the schema. 
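(Editor's note, not part of the patch: the `__get_pydantic_core_schema__` hook referenced in the comment above is a pydantic v2 extension point — a class can tell pydantic to validate against some other schema and then post-process the validated value. A rough standalone sketch of the same pattern, with a made-up `Wrapper` type standing in for the sandbox proxy:)

```python
from typing import Any

from pydantic import BaseModel, GetCoreSchemaHandler
from pydantic_core import CoreSchema, core_schema


class Wrapper:
    """Stand-in for a proxy that wraps another value."""

    def __init__(self, value: int) -> None:
        self.value = value

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        # Validate as a plain int, then wrap the validated value in Wrapper
        return core_schema.no_info_after_validator_function(cls, handler(int))


class Model(BaseModel):
    field: Wrapper


m = Model.model_validate({"field": 3})
assert isinstance(m.field, Wrapper) and m.field.value == 3
```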
+ class PydanticModelTypeConverter(JSONTypeConverter): + """Type converter for pydantic model instances.""" + def to_typed_value(self, hint: Type, value: Any) -> Any: + """Convert dict value to pydantic model instance of the specified type""" if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): return JSONTypeConverter.Unhandled model = hint @@ -67,20 +74,29 @@ def to_typed_value(self, hint: Type, value: Any) -> Any: class PydanticJSONEncoder(AdvancedJSONEncoder): + """JSON encoder for python objects containing pydantic model instances.""" + def default(self, o: Any) -> Any: + """Convert object to jsonable python. + + See :py:meth:`json.JSONEncoder.default`. + """ if isinstance(o, pydantic.BaseModel): return to_jsonable_python(o) return super().default(o) class PydanticPayloadConverter(CompositePayloadConverter): - """Pydantic payload converter. + """Payload converter for payloads containing pydantic model instances. - Payload converter that replaces the default JSON conversion with Pydantic - JSON conversion. + JSON conversion is replaced with a converter that uses + :py:class:`PydanticJSONEncoder` to convert the python object to JSON, and + :py:class:`PydanticModelTypeConverter` to convert raw python values to + pydantic model instances. """ def __init__(self) -> None: + """Initialize object""" json_payload_converter = JSONPlainPayloadConverter( encoder=PydanticJSONEncoder, custom_type_converters=[PydanticModelTypeConverter()], @@ -98,7 +114,7 @@ def __init__(self) -> None: pydantic_data_converter = DataConverter( payload_converter_class=PydanticPayloadConverter ) -"""Data converter for Pydantic models. +"""Data converter for payloads containing pydantic model instances. -To use, pass this as the ``data_converter`` argument to :py:class:`temporalio.client.Client` +To use, pass as the ``data_converter`` argument of :py:class:`temporalio.client.Client` """ diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 73e439883..c1f3260fd 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -951,14 +951,6 @@ def _is_restrictable(v: Any) -> bool: class _RestrictedProxy: - @classmethod - def __get_pydantic_core_schema__( - cls, source_type: Any, handler: GetCoreSchemaHandler - ) -> CoreSchema: - return core_schema.no_info_after_validator_function( - cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) - ) - def __init__(self, *args, **kwargs) -> None: # When we instantiate this class, we have the signature of: # __init__( @@ -1033,6 +1025,15 @@ def __getitem__(self, key: Any) -> Any: ) return ret + # Instruct pydantic to use the proxied type when determining the schema + @classmethod + def __get_pydantic_core_schema__( + cls, source_type: Any, handler: GetCoreSchemaHandler + ) -> CoreSchema: + return core_schema.no_info_after_validator_function( + cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) + ) + __doc__ = _RestrictedProxyLookup( # type: ignore class_value=__doc__, fallback_func=lambda self: type(self).__doc__, is_attr=True ) From 4e95208e52f7a564e75cc5ba324c640da590269d Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 2 Feb 2025 20:55:24 -0500 Subject: [PATCH 15/96] README --- README.md | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 2b28d1e82..f4bf5f8e0 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,6 @@ informal introduction 
to the features and their implementation. - [Extending Restricted Classes](#extending-restricted-classes) - [Certain Standard Library Calls on Restricted Objects](#certain-standard-library-calls-on-restricted-objects) - [is_subclass of ABC-based Restricted Classes](#is_subclass-of-abc-based-restricted-classes) - - [Compiled Pydantic Sometimes Using Wrong Types](#compiled-pydantic-sometimes-using-wrong-types) - [Activities](#activities) - [Definition](#definition-1) - [Types of Activities](#types-of-activities) @@ -312,11 +311,6 @@ The default data converter supports converting multiple types including: * Anything that [`json.dump`](https://docs.python.org/3/library/json.html#json.dump) supports natively * [dataclasses](https://docs.python.org/3/library/dataclasses.html) * Iterables including ones JSON dump may not support by default, e.g. `set` - * Any class with a `dict()` method and a static `parse_obj()` method, e.g. - [Pydantic models](https://pydantic-docs.helpmanual.io/usage/models) - * The default data converter is deprecated for Pydantic models and will warn if used since not all fields work. - See [this sample](https://github.com/temporalio/samples-python/tree/main/pydantic_converter) for the recommended - approach. * [IntEnum, StrEnum](https://docs.python.org/3/library/enum.html) based enumerates * [UUID](https://docs.python.org/3/library/uuid.html) @@ -325,6 +319,14 @@ This notably doesn't include any `date`, `time`, or `datetime` objects as they m Users are strongly encouraged to use a single `dataclass` for parameter and return types so fields with defaults can be easily added without breaking compatibility. +To use pydantic model instances (or python objects containing pydantic model instances), use +```python +from temporalio.contrib.pydantic import pydantic_data_converter + +client = Client(data_converter=pydantic_data_converter, ...) +``` +Do not use pydantic's [strict mode](https://docs.pydantic.dev/latest/concepts/strict_mode/). + Classes with generics may not have the generics properly resolved. The current implementation does not have generic type resolution. Users should use concrete types. @@ -1133,15 +1135,6 @@ Due to [https://bugs.python.org/issue44847](https://bugs.python.org/issue44847), checked to see if they are subclasses of another via `is_subclass` may fail (see also [this wrapt issue](https://github.com/GrahamDumpleton/wrapt/issues/130)). -###### Compiled Pydantic Sometimes Using Wrong Types - -If the Pydantic dependency is in compiled form (the default) and you are using a Pydantic model inside a workflow -sandbox that uses a `datetime` type, it will grab the wrong validator and use `date` instead. This is because our -patched form of `issubclass` is bypassed by compiled Pydantic. - -To work around, either don't use `datetime`-based Pydantic model fields in workflows, or mark `datetime` library as -passthrough (means you lose protection against calling the non-deterministic `now()`), or use non-compiled Pydantic -dependency. 
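(Editor's note, not part of the patch: expanding on the `pydantic_data_converter` usage shown earlier in this README hunk, a hedged end-to-end sketch — the `Order` model, the `ProcessOrder` workflow name, and the `orders` task queue are hypothetical; the workflow is assumed to be registered on a worker built from a client that also uses this data converter:)

```python
from datetime import datetime

from pydantic import BaseModel

from temporalio.client import Client
from temporalio.contrib.pydantic import pydantic_data_converter


class Order(BaseModel):
    order_id: str
    placed_at: datetime


async def place_order(order: Order) -> Order:
    client = await Client.connect(
        "localhost:7233", data_converter=pydantic_data_converter
    )
    # The model round-trips through the converter as workflow input and result
    return await client.execute_workflow(
        "ProcessOrder",  # hypothetical workflow registered elsewhere
        order,
        id=f"order-{order.order_id}",
        task_queue="orders",
        result_type=Order,
    )
```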
### Activities From e769fe8a49caa0281dda21a63cff64ec56e391d6 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 21:29:19 -0500 Subject: [PATCH 16/96] Retain hack for backwards compatiblity --- temporalio/converter.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/temporalio/converter.py b/temporalio/converter.py index 1463eeba2..9e3e929e6 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -1516,6 +1516,23 @@ def value_to_type( # TODO(cretz): Want way to convert snake case to camel case? return hint(**field_values) + # Pydantic model instance + # Pydantic users should use + # temporalio.contrib.pydantic.pydantic_data_converter, in which case a + # pydantic model instance will have been handled by the custom_converters at + # the start of this function. We retain the following for backwards + # compatibility with pydantic users who are not using contrib.pydantic, but + # this is deprecated. + parse_obj_attr = inspect.getattr_static(hint, "parse_obj", None) + if isinstance(parse_obj_attr, classmethod) or isinstance( + parse_obj_attr, staticmethod + ): + if not isinstance(value, dict): + raise TypeError( + f"Cannot convert to {hint}, value is {type(value)} not dict" + ) + return getattr(hint, "parse_obj")(value) + # IntEnum if inspect.isclass(hint) and issubclass(hint, IntEnum): if not isinstance(value, int): From aa4affb91996c182f883a9a0eb31b6ed9e95ffe8 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 23:06:05 -0500 Subject: [PATCH 17/96] Hack: unbreak instantiation of pydantic models --- .../worker/workflow_sandbox/_restrictions.py | 31 +++++++++++++------ 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index c1f3260fd..5c4c2a3c8 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -951,21 +951,32 @@ def _is_restrictable(v: Any) -> bool: class _RestrictedProxy: + # When we instantiate this class, we have the signature of: + # __init__( + # self, + # name: str, + # obj: Any, + # context: RestrictionContext, + # matcher: SandboxMatcher + # ) + # However there are some edge cases; see comments below. + + def __new__(cls, *args, **kwargs) -> Any: + # When pydantic instantiates a model containing a field whose type + # annotation is proxied, it attempts to call the field type constructor + # with the field value but instead calls the _RestrictedProxy + # constructor. Return the field value. + if len(args) == 1: + return args[0] + return super().__new__(cls) + def __init__(self, *args, **kwargs) -> None: - # When we instantiate this class, we have the signature of: - # __init__( - # self, - # name: str, - # obj: Any, - # context: RestrictionContext, - # matcher: SandboxMatcher - # ) - # However when Python subclasses a class, it calls metaclass() on the + # When Python subclasses a class, it calls metaclass() on the # class object which doesn't match these args. For now, we'll just # ignore inits on these metadata classes. 
# TODO(cretz): Properly support subclassing restricted classes in # sandbox - if isinstance(args[2], RestrictionContext): + if len(args) == 4 and isinstance(args[2], RestrictionContext): _trace("__init__ on %s", args[0]) _RestrictionState( name=args[0], obj=args[1], context=args[2], matcher=args[3] From 9e4d0a12ce1725b3b616a3ac1cd55f66f3dd02c6 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 23:07:39 -0500 Subject: [PATCH 18/96] Test pydantic usage in workflow --- tests/contrib/test_pydantic.py | 50 ++++++++++++++++++++ tests/worker/workflow_sandbox/test_runner.py | 13 ----- 2 files changed, 50 insertions(+), 13 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index f918ab384..2e605281f 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -166,3 +166,53 @@ async def test_mixed_collection_types(client: Client): ) assert orig_dataclass_objects == round_tripped_dataclass_objects assert orig_pydantic_objects == round_tripped_pydantic_objects + + +@workflow.defn +class PydanticModelInWorkflow: + @workflow.run + async def run(self) -> None: + o1, _ = make_pydantic_objects() + assert isinstance(o1, MyPydanticModel) + assert isinstance(o1, BaseModel) + assert isinstance(o1.ip_field, IPv4Address) + assert isinstance(o1.datetime_field, datetime) + assert issubclass(o1.annotated_datetime.__class__, datetime) + assert isinstance(o1.string_field_assigned_field, str) + assert isinstance(o1.datetime_field_assigned_field, datetime) + assert isinstance(o1.string_field_with_default, str) + assert isinstance(o1.datetime_field_with_default, datetime) + assert isinstance(o1.annotated_datetime, datetime) + assert isinstance(o1.annotated_list_of_str, list) + assert isinstance(o1.annotated_list_of_datetime, list) + assert isinstance(o1.str_short_sequence, list) + assert isinstance(o1.datetime_short_sequence, list) + assert o1.annotated_datetime == datetime(2000, 1, 2, 3, 4, 5) + assert o1.annotated_list_of_str == ["my-string-1", "my-string-2"] + assert o1.annotated_list_of_datetime == [ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ] + assert o1.str_short_sequence == ["my-string-1", "my-string-2"] + assert o1.datetime_short_sequence == [ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ] + + +async def test_pydantic_usage_in_workflow(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[PydanticModelInWorkflow], + ): + await client.execute_workflow( + PydanticModelInWorkflow.run, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) diff --git a/tests/worker/workflow_sandbox/test_runner.py b/tests/worker/workflow_sandbox/test_runner.py index a83c5eff3..d367fed2c 100644 --- a/tests/worker/workflow_sandbox/test_runner.py +++ b/tests/worker/workflow_sandbox/test_runner.py @@ -12,7 +12,6 @@ from enum import IntEnum from typing import Callable, Dict, List, Optional, Sequence, Set, Type -import pydantic import pytest import temporalio.worker.workflow_sandbox._restrictions @@ -390,10 +389,6 @@ async def test_workflow_sandbox_with_proto(client: Client): assert result is not param and result == param -class PydanticMessage(pydantic.BaseModel): - content: datetime - - @workflow.defn class KnownIssuesWorkflow: @workflow.run @@ -413,14 +408,6 @@ async def run(self) -> None: except 
RuntimeError as err: assert "Restriction state not present" in str(err) - # Using a datetime in binary-compiled Pydantic skips our issubclass when - # building their validators causing it to use date instead - # TODO(cretz): https://github.com/temporalio/sdk-python/issues/207 - if pydantic.compiled: - assert isinstance(PydanticMessage(content=workflow.now()).content, date) - else: - assert isinstance(PydanticMessage(content=workflow.now()).content, datetime) - async def test_workflow_sandbox_known_issues(client: Client): async with new_worker(client, KnownIssuesWorkflow) as worker: From 62bdfccd25a4ad8981611829b01620a74aecf89a Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 23:25:56 -0500 Subject: [PATCH 19/96] Don't restrict datetime instances --- temporalio/worker/workflow_sandbox/_restrictions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 5c4c2a3c8..321f310a4 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -17,6 +17,7 @@ import warnings from copy import copy, deepcopy from dataclasses import dataclass +from datetime import datetime from typing import ( Any, Callable, @@ -946,7 +947,7 @@ def r_op(obj: Any, other: Any) -> Any: def _is_restrictable(v: Any) -> bool: return v is not None and not isinstance( - v, (bool, int, float, complex, str, bytes, bytearray) + v, (bool, int, float, complex, str, bytes, bytearray, datetime) ) From 941065dede8715a0dfa75e16a2b3eab7563ed172 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 3 Feb 2025 23:48:11 -0500 Subject: [PATCH 20/96] Only use pydantic in sandbox if it can be imported --- .../worker/workflow_sandbox/_restrictions.py | 31 ++++++++++++------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 321f310a4..cfca09e46 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -32,8 +32,13 @@ cast, ) -from pydantic import GetCoreSchemaHandler -from pydantic_core import CoreSchema, core_schema +try: + import pydantic + import pydantic_core + + HAVE_PYDANTIC = True +except ImportError: + HAVE_PYDANTIC = False import temporalio.workflow @@ -986,7 +991,7 @@ def __init__(self, *args, **kwargs) -> None: _trace("__init__ unrecognized with args %s", args) def __getattribute__(self, __name: str) -> Any: - if __name == "__get_pydantic_core_schema__": + if HAVE_PYDANTIC and __name == "__get_pydantic_core_schema__": return object.__getattribute__(self, "__get_pydantic_core_schema__") state = _RestrictionState.from_proxy(self) _trace("__getattribute__ %s on %s", __name, state.name) @@ -1037,14 +1042,18 @@ def __getitem__(self, key: Any) -> Any: ) return ret - # Instruct pydantic to use the proxied type when determining the schema - @classmethod - def __get_pydantic_core_schema__( - cls, source_type: Any, handler: GetCoreSchemaHandler - ) -> CoreSchema: - return core_schema.no_info_after_validator_function( - cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) - ) + if HAVE_PYDANTIC: + # Instruct pydantic to use the proxied type when determining the schema + # https://docs.pydantic.dev/latest/concepts/types/#customizing-validation-with-__get_pydantic_core_schema__ + @classmethod + def __get_pydantic_core_schema__( + cls, + 
source_type: Any, + handler: pydantic.GetCoreSchemaHandler, + ) -> pydantic_core.CoreSchema: + return pydantic_core.core_schema.no_info_after_validator_function( + cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) + ) __doc__ = _RestrictedProxyLookup( # type: ignore class_value=__doc__, fallback_func=lambda self: type(self).__doc__, is_attr=True From bd22c07fdffa580539b50fb2ec885727314875ab Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 4 Feb 2025 10:08:30 -0500 Subject: [PATCH 21/96] Expand tests --- tests/contrib/test_pydantic.py | 48 +++++++++++++++++++++++++++------- 1 file changed, 38 insertions(+), 10 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 2e605281f..f834fcfc9 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -169,38 +169,66 @@ async def test_mixed_collection_types(client: Client): @workflow.defn -class PydanticModelInWorkflow: +class PydanticModelUsageWorkflow: @workflow.run async def run(self) -> None: o1, _ = make_pydantic_objects() assert isinstance(o1, MyPydanticModel) assert isinstance(o1, BaseModel) assert isinstance(o1.ip_field, IPv4Address) + assert isinstance(o1.string_field_assigned_field, str) + assert isinstance(o1.string_field_with_default, str) + assert isinstance(o1.annotated_list_of_str, list) + assert isinstance(o1.str_short_sequence, list) + assert o1.annotated_list_of_str == ["my-string-1", "my-string-2"] + assert o1.str_short_sequence == ["my-string-1", "my-string-2"] + + +async def test_pydantic_model_usage_in_workflow(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[PydanticModelUsageWorkflow], + ): + await client.execute_workflow( + PydanticModelUsageWorkflow.run, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + + +@workflow.defn +class DatetimeUsageWorkflow: + @workflow.run + async def run(self) -> None: + dt = workflow.now() + assert isinstance(dt, datetime) + assert issubclass(dt.__class__, datetime) + o1, _ = make_pydantic_objects() assert isinstance(o1.datetime_field, datetime) assert issubclass(o1.annotated_datetime.__class__, datetime) - assert isinstance(o1.string_field_assigned_field, str) assert isinstance(o1.datetime_field_assigned_field, datetime) - assert isinstance(o1.string_field_with_default, str) assert isinstance(o1.datetime_field_with_default, datetime) assert isinstance(o1.annotated_datetime, datetime) - assert isinstance(o1.annotated_list_of_str, list) assert isinstance(o1.annotated_list_of_datetime, list) - assert isinstance(o1.str_short_sequence, list) assert isinstance(o1.datetime_short_sequence, list) assert o1.annotated_datetime == datetime(2000, 1, 2, 3, 4, 5) - assert o1.annotated_list_of_str == ["my-string-1", "my-string-2"] assert o1.annotated_list_of_datetime == [ datetime(2000, 1, 2, 3, 4, 5), datetime(2000, 11, 12, 13, 14, 15), ] - assert o1.str_short_sequence == ["my-string-1", "my-string-2"] assert o1.datetime_short_sequence == [ datetime(2000, 1, 2, 3, 4, 5), datetime(2000, 11, 12, 13, 14, 15), ] -async def test_pydantic_usage_in_workflow(client: Client): +async def test_datetime_usage_in_workflow(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) @@ -209,10 +237,10 @@ async def test_pydantic_usage_in_workflow(client: Client): 
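    # Note: the tests in this file swap the converter onto an existing client via
    # client.config(); in application code the converter would more typically be
    # supplied when connecting. A minimal sketch (the target address below is
    # illustrative, not part of this change):
    #   client = await Client.connect(
    #       "localhost:7233", data_converter=pydantic_data_converter
    #   )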
async with Worker( client, task_queue=task_queue_name, - workflows=[PydanticModelInWorkflow], + workflows=[DatetimeUsageWorkflow], ): await client.execute_workflow( - PydanticModelInWorkflow.run, + DatetimeUsageWorkflow.run, id=str(uuid.uuid4()), task_queue=task_queue_name, ) From 781e0808e50b0708f35985ae1002189cd8f593f6 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 13:22:05 -0500 Subject: [PATCH 22/96] Fix __get_pydantic_core_schema__ implementation --- .../worker/workflow_sandbox/_restrictions.py | 35 ++++++------------- 1 file changed, 11 insertions(+), 24 deletions(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index cfca09e46..073ded05c 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -957,32 +957,21 @@ def _is_restrictable(v: Any) -> bool: class _RestrictedProxy: - # When we instantiate this class, we have the signature of: - # __init__( - # self, - # name: str, - # obj: Any, - # context: RestrictionContext, - # matcher: SandboxMatcher - # ) - # However there are some edge cases; see comments below. - - def __new__(cls, *args, **kwargs) -> Any: - # When pydantic instantiates a model containing a field whose type - # annotation is proxied, it attempts to call the field type constructor - # with the field value but instead calls the _RestrictedProxy - # constructor. Return the field value. - if len(args) == 1: - return args[0] - return super().__new__(cls) - def __init__(self, *args, **kwargs) -> None: - # When Python subclasses a class, it calls metaclass() on the + # When we instantiate this class, we have the signature of: + # __init__( + # self, + # name: str, + # obj: Any, + # context: RestrictionContext, + # matcher: SandboxMatcher + # ) + # However when Python subclasses a class, it calls metaclass() on the # class object which doesn't match these args. For now, we'll just # ignore inits on these metadata classes. 
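        # For reference, an ordinary proxy construction (as exercised in the
        # restriction tests) passes exactly those four positional arguments,
        # roughly:
        #   _RestrictedProxy("RestrictableClass", RestrictableClass,
        #                    RestrictionContext(), SandboxMatcher())
        # whereas a metaclass-style invocation follows type(name, bases, namespace),
        # so neither the arity nor the argument types line up with that signature.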
# TODO(cretz): Properly support subclassing restricted classes in # sandbox - if len(args) == 4 and isinstance(args[2], RestrictionContext): + if isinstance(args[2], RestrictionContext): _trace("__init__ on %s", args[0]) _RestrictionState( name=args[0], obj=args[1], context=args[2], matcher=args[3] @@ -1051,9 +1040,7 @@ def __get_pydantic_core_schema__( source_type: Any, handler: pydantic.GetCoreSchemaHandler, ) -> pydantic_core.CoreSchema: - return pydantic_core.core_schema.no_info_after_validator_function( - cls, handler(RestrictionContext.unwrap_if_proxied(source_type)) - ) + return handler(RestrictionContext.unwrap_if_proxied(source_type)) __doc__ = _RestrictedProxyLookup( # type: ignore class_value=__doc__, fallback_func=lambda self: type(self).__doc__, is_attr=True From eb60fe63036acd66645be43a9842f83ea27f09ca Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 14:11:09 -0500 Subject: [PATCH 23/96] TEST FAILURE: Refactor tests and use two models --- tests/contrib/test_pydantic.py | 106 ++++++++++++++++----------------- 1 file changed, 52 insertions(+), 54 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index f834fcfc9..0c8d08574 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime, timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, Tuple, TypeVar +from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -18,55 +18,74 @@ class MyPydanticModel(BaseModel): ip_field: IPv4Address - datetime_field: datetime string_field_assigned_field: str = Field() - datetime_field_assigned_field: datetime = Field() string_field_with_default: str = Field(default_factory=lambda: "my-string") + annotated_list_of_str: Annotated[ + List[str], Field(), WithJsonSchema({"extra": "data"}) + ] + str_short_sequence: ShortSequence[List[str]] + + def _make_assertions(self): + assert isinstance(self.ip_field, IPv4Address) + assert isinstance(self.string_field_assigned_field, str) + assert isinstance(self.string_field_with_default, str) + assert isinstance(self.annotated_list_of_str, list) + assert isinstance(self.str_short_sequence, list) + assert self.annotated_list_of_str == ["my-string-1", "my-string-2"] + assert self.str_short_sequence == ["my-string-1", "my-string-2"] + + +class MyPydanticDatetimeModel(BaseModel): + datetime_field: datetime + datetime_field_assigned_field: datetime = Field() datetime_field_with_default: datetime = Field( default_factory=lambda: datetime(2000, 1, 2, 3, 4, 5) ) annotated_datetime: Annotated[datetime, Field(), WithJsonSchema({"extra": "data"})] - annotated_list_of_str: Annotated[ - List[str], Field(), WithJsonSchema({"extra": "data"}) - ] annotated_list_of_datetime: Annotated[ List[datetime], Field(), WithJsonSchema({"extra": "data"}) ] - str_short_sequence: ShortSequence[List[str]] datetime_short_sequence: ShortSequence[List[datetime]] + def _make_assertions(self): + _assert_datetime_validity(self.datetime_field) + _assert_datetime_validity(self.datetime_field_assigned_field) + _assert_datetime_validity(self.datetime_field_with_default) + _assert_datetime_validity(self.annotated_datetime) + assert isinstance(self.annotated_list_of_datetime, list) + assert isinstance(self.datetime_short_sequence, list) + assert self.annotated_datetime == datetime(2000, 1, 2, 3, 4, 5) + assert 
self.annotated_list_of_datetime == [ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ] + assert self.datetime_short_sequence == [ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2000, 11, 12, 13, 14, 15), + ] + + +def _assert_datetime_validity(dt: datetime): + assert isinstance(dt, datetime) + assert issubclass(dt.__class__, datetime) + -def make_pydantic_objects() -> List[MyPydanticModel]: +def make_pydantic_objects() -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), - datetime_field=datetime(2000, 1, 2, 3, 4, 5), string_field_assigned_field="my-string", - datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), - annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), annotated_list_of_str=["my-string-1", "my-string-2"], - annotated_list_of_datetime=[ - datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), - ], str_short_sequence=["my-string-1", "my-string-2"], - datetime_short_sequence=[ - datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), - ], ), - MyPydanticModel( - ip_field=IPv4Address("127.0.0.2"), + MyPydanticDatetimeModel( datetime_field=datetime(2001, 2, 3, 4, 5, 6), - string_field_assigned_field="my-string", datetime_field_assigned_field=datetime(2000, 2, 3, 4, 5, 6), annotated_datetime=datetime(2001, 2, 3, 4, 5, 6), - annotated_list_of_str=["my-string-3", "my-string-4"], annotated_list_of_datetime=[ datetime(2001, 2, 3, 4, 5, 6), datetime(2001, 12, 13, 14, 15, 16), ], - str_short_sequence=["my-string-3", "my-string-4"], datetime_short_sequence=[ datetime(2001, 2, 3, 4, 5, 6), datetime(2001, 12, 13, 14, 15, 16), @@ -77,15 +96,17 @@ def make_pydantic_objects() -> List[MyPydanticModel]: @activity.defn async def list_of_pydantic_models_activity( - models: List[MyPydanticModel], -) -> List[MyPydanticModel]: + models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]], +) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: return models @workflow.defn class ListOfPydanticObjectsWorkflow: @workflow.run - async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: + async def run( + self, models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: return await workflow.execute_activity( list_of_pydantic_models_activity, models, @@ -172,16 +193,8 @@ async def test_mixed_collection_types(client: Client): class PydanticModelUsageWorkflow: @workflow.run async def run(self) -> None: - o1, _ = make_pydantic_objects() - assert isinstance(o1, MyPydanticModel) - assert isinstance(o1, BaseModel) - assert isinstance(o1.ip_field, IPv4Address) - assert isinstance(o1.string_field_assigned_field, str) - assert isinstance(o1.string_field_with_default, str) - assert isinstance(o1.annotated_list_of_str, list) - assert isinstance(o1.str_short_sequence, list) - assert o1.annotated_list_of_str == ["my-string-1", "my-string-2"] - assert o1.str_short_sequence == ["my-string-1", "my-string-2"] + for o in make_pydantic_objects(): + o._make_assertions() async def test_pydantic_model_usage_in_workflow(client: Client): @@ -209,23 +222,8 @@ async def run(self) -> None: dt = workflow.now() assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) - o1, _ = make_pydantic_objects() - assert isinstance(o1.datetime_field, datetime) - assert issubclass(o1.annotated_datetime.__class__, datetime) - assert isinstance(o1.datetime_field_assigned_field, datetime) - assert 
isinstance(o1.datetime_field_with_default, datetime) - assert isinstance(o1.annotated_datetime, datetime) - assert isinstance(o1.annotated_list_of_datetime, list) - assert isinstance(o1.datetime_short_sequence, list) - assert o1.annotated_datetime == datetime(2000, 1, 2, 3, 4, 5) - assert o1.annotated_list_of_datetime == [ - datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), - ] - assert o1.datetime_short_sequence == [ - datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), - ] + for o in make_pydantic_objects(): + o._make_assertions() async def test_datetime_usage_in_workflow(client: Client): From ccdbe0fd4317e5a23db22b3d8bc321058392773c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 14:12:27 -0500 Subject: [PATCH 24/96] One model --- tests/contrib/test_pydantic.py | 25 ++++++------------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 0c8d08574..df9e1abe8 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime, timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union +from typing import Annotated, Any, List, Sequence, Tuple, TypeVar from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -70,7 +70,7 @@ def _assert_datetime_validity(dt: datetime): assert issubclass(dt.__class__, datetime) -def make_pydantic_objects() -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: +def make_pydantic_objects() -> List[MyPydanticModel]: return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), @@ -78,26 +78,13 @@ def make_pydantic_objects() -> List[Union[MyPydanticModel, MyPydanticDatetimeMod annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], ), - MyPydanticDatetimeModel( - datetime_field=datetime(2001, 2, 3, 4, 5, 6), - datetime_field_assigned_field=datetime(2000, 2, 3, 4, 5, 6), - annotated_datetime=datetime(2001, 2, 3, 4, 5, 6), - annotated_list_of_datetime=[ - datetime(2001, 2, 3, 4, 5, 6), - datetime(2001, 12, 13, 14, 15, 16), - ], - datetime_short_sequence=[ - datetime(2001, 2, 3, 4, 5, 6), - datetime(2001, 12, 13, 14, 15, 16), - ], - ), ] @activity.defn async def list_of_pydantic_models_activity( - models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]], -) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: + models: List[MyPydanticModel], +) -> List[MyPydanticModel]: return models @@ -105,8 +92,8 @@ async def list_of_pydantic_models_activity( class ListOfPydanticObjectsWorkflow: @workflow.run async def run( - self, models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]] - ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: + self, models: List[MyPydanticModel] + ) -> List[MyPydanticModel]: return await workflow.execute_activity( list_of_pydantic_models_activity, models, From c1ba55b1e8fafc45b674282848291367942303f5 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 14:24:01 -0500 Subject: [PATCH 25/96] Refactor tests --- tests/contrib/test_pydantic.py | 120 ++++++++++++++++++++++++++------- 1 file changed, 95 insertions(+), 25 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index df9e1abe8..7c8e5d3c8 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,7 +2,7 @@ import uuid from datetime import datetime, 
timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, Tuple, TypeVar +from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -25,7 +25,7 @@ class MyPydanticModel(BaseModel): ] str_short_sequence: ShortSequence[List[str]] - def _make_assertions(self): + def _check_instance(self): assert isinstance(self.ip_field, IPv4Address) assert isinstance(self.string_field_assigned_field, str) assert isinstance(self.string_field_with_default, str) @@ -47,7 +47,7 @@ class MyPydanticDatetimeModel(BaseModel): ] datetime_short_sequence: ShortSequence[List[datetime]] - def _make_assertions(self): + def _check_instance(self): _assert_datetime_validity(self.datetime_field) _assert_datetime_validity(self.datetime_field_assigned_field) _assert_datetime_validity(self.datetime_field_with_default) @@ -57,11 +57,11 @@ def _make_assertions(self): assert self.annotated_datetime == datetime(2000, 1, 2, 3, 4, 5) assert self.annotated_list_of_datetime == [ datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), + datetime(2001, 11, 12, 13, 14, 15), ] assert self.datetime_short_sequence == [ datetime(2000, 1, 2, 3, 4, 5), - datetime(2000, 11, 12, 13, 14, 15), + datetime(2001, 11, 12, 13, 14, 15), ] @@ -70,7 +70,7 @@ def _assert_datetime_validity(dt: datetime): assert issubclass(dt.__class__, datetime) -def make_pydantic_objects() -> List[MyPydanticModel]: +def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), @@ -81,42 +81,109 @@ def make_pydantic_objects() -> List[MyPydanticModel]: ] +def make_heterogenous_list_of_pydantic_objects() -> ( + List[Union[MyPydanticModel, MyPydanticDatetimeModel]] +): + return [ + MyPydanticModel( + ip_field=IPv4Address("127.0.0.1"), + string_field_assigned_field="my-string", + annotated_list_of_str=["my-string-1", "my-string-2"], + str_short_sequence=["my-string-1", "my-string-2"], + ), + MyPydanticDatetimeModel( + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), + annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), + annotated_list_of_datetime=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2001, 11, 12, 13, 14, 15), + ], + datetime_short_sequence=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2001, 11, 12, 13, 14, 15), + ], + ), + ] + + @activity.defn -async def list_of_pydantic_models_activity( +async def homogeneous_list_of_pydantic_models_activity( models: List[MyPydanticModel], ) -> List[MyPydanticModel]: return models +@activity.defn +async def heterogeneous_list_of_pydantic_models_activity( + models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]], +) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: + return models + + @workflow.defn -class ListOfPydanticObjectsWorkflow: +class HomogenousListOfPydanticObjectsWorkflow: + @workflow.run + async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: + return await workflow.execute_activity( + homogeneous_list_of_pydantic_models_activity, + models, + start_to_close_timeout=timedelta(minutes=1), + ) + + +@workflow.defn +class HeterogenousListOfPydanticObjectsWorkflow: @workflow.run async def run( - self, models: List[MyPydanticModel] - ) -> List[MyPydanticModel]: + self, models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: return await 
workflow.execute_activity( - list_of_pydantic_models_activity, + heterogeneous_list_of_pydantic_models_activity, models, start_to_close_timeout=timedelta(minutes=1), ) -async def test_field_conversion(client: Client): +async def test_homogeneous_list_of_pydantic_objects(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_pydantic_objects = make_homogeneous_list_of_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[HomogenousListOfPydanticObjectsWorkflow], + activities=[homogeneous_list_of_pydantic_models_activity], + ): + round_tripped_pydantic_objects = await client.execute_workflow( + HomogenousListOfPydanticObjectsWorkflow.run, + orig_pydantic_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert orig_pydantic_objects == round_tripped_pydantic_objects + + +async def test_heterogenous_list_of_pydantic_objects(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_pydantic_objects = make_pydantic_objects() + orig_pydantic_objects = make_heterogenous_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, - workflows=[ListOfPydanticObjectsWorkflow], - activities=[list_of_pydantic_models_activity], + workflows=[HeterogenousListOfPydanticObjectsWorkflow], + activities=[heterogeneous_list_of_pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( - ListOfPydanticObjectsWorkflow.run, + HeterogenousListOfPydanticObjectsWorkflow.run, orig_pydantic_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, @@ -137,11 +204,16 @@ def make_dataclass_objects() -> List[MyDataClass]: class MixedCollectionTypesWorkflow: @workflow.run async def run( - self, input: Tuple[List[MyDataClass], List[MyPydanticModel]] - ) -> Tuple[List[MyDataClass], List[MyPydanticModel]]: + self, + input: Tuple[ + List[MyDataClass], List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + ], + ) -> Tuple[ + List[MyDataClass], List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( - list_of_pydantic_models_activity, + heterogeneous_list_of_pydantic_models_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -155,13 +227,13 @@ async def test_mixed_collection_types(client: Client): task_queue_name = str(uuid.uuid4()) orig_dataclass_objects = make_dataclass_objects() - orig_pydantic_objects = make_pydantic_objects() + orig_pydantic_objects = make_heterogenous_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, workflows=[MixedCollectionTypesWorkflow], - activities=[list_of_pydantic_models_activity], + activities=[heterogeneous_list_of_pydantic_models_activity], ): ( round_tripped_dataclass_objects, @@ -180,8 +252,8 @@ async def test_mixed_collection_types(client: Client): class PydanticModelUsageWorkflow: @workflow.run async def run(self) -> None: - for o in make_pydantic_objects(): - o._make_assertions() + for o in make_heterogenous_list_of_pydantic_objects(): + o._check_instance() async def test_pydantic_model_usage_in_workflow(client: Client): @@ -209,8 +281,6 @@ async def run(self) -> None: dt = workflow.now() assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) - for o in 
make_pydantic_objects(): - o._make_assertions() async def test_datetime_usage_in_workflow(client: Client): From 2811d64eb9f89790f2a0e018c3e5f67a74244b25 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 14:59:51 -0500 Subject: [PATCH 26/96] test date --- tests/contrib/test_pydantic.py | 61 +++++++++++++++++++++++++++++----- 1 file changed, 53 insertions(+), 8 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 7c8e5d3c8..31eda48a0 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,6 +1,6 @@ import dataclasses import uuid -from datetime import datetime, timedelta +from datetime import date, datetime, timedelta from ipaddress import IPv4Address from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union @@ -65,11 +65,44 @@ def _check_instance(self): ] +class MyPydanticDateModel(BaseModel): + date_field: date + date_field_assigned_field: date = Field() + date_field_with_default: date = Field(default_factory=lambda: date(2000, 1, 2)) + annotated_date: Annotated[date, Field(), WithJsonSchema({"extra": "data"})] + annotated_list_of_date: Annotated[ + List[date], Field(), WithJsonSchema({"extra": "data"}) + ] + date_short_sequence: ShortSequence[List[date]] + + def _check_instance(self): + _assert_date_validity(self.date_field) + _assert_date_validity(self.date_field_assigned_field) + _assert_date_validity(self.date_field_with_default) + _assert_date_validity(self.annotated_date) + assert isinstance(self.annotated_list_of_date, list) + assert isinstance(self.date_short_sequence, list) + assert self.annotated_date == date(2000, 1, 2) + assert self.annotated_list_of_date == [ + date(2000, 1, 2), + date(2001, 11, 12), + ] + assert self.date_short_sequence == [ + date(2000, 1, 2), + date(2001, 11, 12), + ] + + def _assert_datetime_validity(dt: datetime): assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) +def _assert_date_validity(d: date): + assert isinstance(d, date) + assert issubclass(d.__class__, date) + + def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: return [ MyPydanticModel( @@ -82,7 +115,7 @@ def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: def make_heterogenous_list_of_pydantic_objects() -> ( - List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]] ): return [ MyPydanticModel( @@ -104,6 +137,13 @@ def make_heterogenous_list_of_pydantic_objects() -> ( datetime(2001, 11, 12, 13, 14, 15), ], ), + MyPydanticDateModel( + date_field=date(2000, 1, 2), + date_field_assigned_field=date(2000, 1, 2), + annotated_date=date(2000, 1, 2), + annotated_list_of_date=[date(2000, 1, 2), date(2001, 11, 12)], + date_short_sequence=[date(2000, 1, 2), date(2001, 11, 12)], + ), ] @@ -116,8 +156,8 @@ async def homogeneous_list_of_pydantic_models_activity( @activity.defn async def heterogeneous_list_of_pydantic_models_activity( - models: List[Union[MyPydanticModel, MyPydanticDatetimeModel]], -) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: + models: List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], +) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]]: return models @@ -136,8 +176,11 @@ async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: class HeterogenousListOfPydanticObjectsWorkflow: @workflow.run async def run( - self, models: List[Union[MyPydanticModel, 
MyPydanticDatetimeModel]] - ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel]]: + self, + models: List[ + Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel] + ], + ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]]: return await workflow.execute_activity( heterogeneous_list_of_pydantic_models_activity, models, @@ -206,10 +249,12 @@ class MixedCollectionTypesWorkflow: async def run( self, input: Tuple[ - List[MyDataClass], List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + List[MyDataClass], + List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], ], ) -> Tuple[ - List[MyDataClass], List[Union[MyPydanticModel, MyPydanticDatetimeModel]] + List[MyDataClass], + List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( From 825fec4e7db76bc4f431979a28fe673837a258b0 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 15:09:27 -0500 Subject: [PATCH 27/96] Don't restrict date instances --- .../worker/workflow_sandbox/_restrictions.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 073ded05c..88b595386 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -7,6 +7,7 @@ from __future__ import annotations import dataclasses +import datetime import functools import inspect import logging @@ -17,7 +18,6 @@ import warnings from copy import copy, deepcopy from dataclasses import dataclass -from datetime import datetime from typing import ( Any, Callable, @@ -952,7 +952,18 @@ def r_op(obj: Any, other: Any) -> Any: def _is_restrictable(v: Any) -> bool: return v is not None and not isinstance( - v, (bool, int, float, complex, str, bytes, bytearray, datetime) + v, + ( + bool, + int, + float, + complex, + str, + bytes, + bytearray, + datetime.date, + datetime.datetime, + ), ) From e1da66461e10aaf2ee85c15952197608398b426c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 15:12:34 -0500 Subject: [PATCH 28/96] Test timedelta --- tests/contrib/test_pydantic.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 31eda48a0..e73f015f3 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -18,6 +18,7 @@ class MyPydanticModel(BaseModel): ip_field: IPv4Address + timedelta_field: timedelta string_field_assigned_field: str = Field() string_field_with_default: str = Field(default_factory=lambda: "my-string") annotated_list_of_str: Annotated[ @@ -27,6 +28,8 @@ class MyPydanticModel(BaseModel): def _check_instance(self): assert isinstance(self.ip_field, IPv4Address) + assert isinstance(self.timedelta_field, timedelta) + assert self.timedelta_field == timedelta(1, 2, 3, 4, 5, 6, 7) assert isinstance(self.string_field_assigned_field, str) assert isinstance(self.string_field_with_default, str) assert isinstance(self.annotated_list_of_str, list) @@ -107,6 +110,7 @@ def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), + timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], @@ 
-120,6 +124,7 @@ def make_heterogenous_list_of_pydantic_objects() -> ( return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), + timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], From d39ce6e69cc13f5db54732dccaf171e519a29c6e Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:13:31 -0500 Subject: [PATCH 29/96] Revert "Test timedelta" This reverts commit d8712beb19f676511a0146b06b99f9cc8de29878. --- tests/contrib/test_pydantic.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index e73f015f3..31eda48a0 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -18,7 +18,6 @@ class MyPydanticModel(BaseModel): ip_field: IPv4Address - timedelta_field: timedelta string_field_assigned_field: str = Field() string_field_with_default: str = Field(default_factory=lambda: "my-string") annotated_list_of_str: Annotated[ @@ -28,8 +27,6 @@ class MyPydanticModel(BaseModel): def _check_instance(self): assert isinstance(self.ip_field, IPv4Address) - assert isinstance(self.timedelta_field, timedelta) - assert self.timedelta_field == timedelta(1, 2, 3, 4, 5, 6, 7) assert isinstance(self.string_field_assigned_field, str) assert isinstance(self.string_field_with_default, str) assert isinstance(self.annotated_list_of_str, list) @@ -110,7 +107,6 @@ def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), - timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], @@ -124,7 +120,6 @@ def make_heterogenous_list_of_pydantic_objects() -> ( return [ MyPydanticModel( ip_field=IPv4Address("127.0.0.1"), - timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], From 3287edf7c0c9bade5506d0debaba0e5350f35fb4 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:13:54 -0500 Subject: [PATCH 30/96] Rename --- tests/contrib/test_pydantic.py | 38 ++++++++++++++++------------------ 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 31eda48a0..6b68b7b85 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -16,7 +16,7 @@ ShortSequence = Annotated[SequenceType, Len(max_length=2)] -class MyPydanticModel(BaseModel): +class PydanticModel(BaseModel): ip_field: IPv4Address string_field_assigned_field: str = Field() string_field_with_default: str = Field(default_factory=lambda: "my-string") @@ -35,7 +35,7 @@ def _check_instance(self): assert self.str_short_sequence == ["my-string-1", "my-string-2"] -class MyPydanticDatetimeModel(BaseModel): +class PydanticDatetimeModel(BaseModel): datetime_field: datetime datetime_field_assigned_field: datetime = Field() datetime_field_with_default: datetime = Field( @@ -65,7 +65,7 @@ def _check_instance(self): ] -class MyPydanticDateModel(BaseModel): +class PydanticDateModel(BaseModel): date_field: date date_field_assigned_field: date = Field() date_field_with_default: date = Field(default_factory=lambda: date(2000, 1, 2)) @@ -103,9 +103,9 @@ def _assert_date_validity(d: date): 
assert issubclass(d.__class__, date) -def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: +def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: return [ - MyPydanticModel( + PydanticModel( ip_field=IPv4Address("127.0.0.1"), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], @@ -115,16 +115,16 @@ def make_homogeneous_list_of_pydantic_objects() -> List[MyPydanticModel]: def make_heterogenous_list_of_pydantic_objects() -> ( - List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]] + List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]] ): return [ - MyPydanticModel( + PydanticModel( ip_field=IPv4Address("127.0.0.1"), string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], ), - MyPydanticDatetimeModel( + PydanticDatetimeModel( datetime_field=datetime(2000, 1, 2, 3, 4, 5), datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), @@ -137,7 +137,7 @@ def make_heterogenous_list_of_pydantic_objects() -> ( datetime(2001, 11, 12, 13, 14, 15), ], ), - MyPydanticDateModel( + PydanticDateModel( date_field=date(2000, 1, 2), date_field_assigned_field=date(2000, 1, 2), annotated_date=date(2000, 1, 2), @@ -149,22 +149,22 @@ def make_heterogenous_list_of_pydantic_objects() -> ( @activity.defn async def homogeneous_list_of_pydantic_models_activity( - models: List[MyPydanticModel], -) -> List[MyPydanticModel]: + models: List[PydanticModel], +) -> List[PydanticModel]: return models @activity.defn async def heterogeneous_list_of_pydantic_models_activity( - models: List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], -) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]]: + models: List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], +) -> List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]]: return models @workflow.defn class HomogenousListOfPydanticObjectsWorkflow: @workflow.run - async def run(self, models: List[MyPydanticModel]) -> List[MyPydanticModel]: + async def run(self, models: List[PydanticModel]) -> List[PydanticModel]: return await workflow.execute_activity( homogeneous_list_of_pydantic_models_activity, models, @@ -177,10 +177,8 @@ class HeterogenousListOfPydanticObjectsWorkflow: @workflow.run async def run( self, - models: List[ - Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel] - ], - ) -> List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]]: + models: List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], + ) -> List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]]: return await workflow.execute_activity( heterogeneous_list_of_pydantic_models_activity, models, @@ -250,11 +248,11 @@ async def run( self, input: Tuple[ List[MyDataClass], - List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], + List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], ], ) -> Tuple[ List[MyDataClass], - List[Union[MyPydanticModel, MyPydanticDatetimeModel, MyPydanticDateModel]], + List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( From 5d0a3e461244f074567bb06a6b5d8a4540ed7bf8 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:16:22 -0500 Subject: 
[PATCH 31/96] test timedelta --- tests/contrib/test_pydantic.py | 75 +++++++++++++++++++++++++++++----- 1 file changed, 64 insertions(+), 11 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 6b68b7b85..c4a17e6c7 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -93,6 +93,46 @@ def _check_instance(self): ] +class PydanticTimedeltaModel(BaseModel): + timedelta_field: timedelta + timedelta_field_assigned_field: timedelta = Field() + timedelta_field_with_default: timedelta = Field( + default_factory=lambda: timedelta(days=1) + ) + annotated_timedelta: Annotated[ + timedelta, Field(), WithJsonSchema({"extra": "data"}) + ] + annotated_list_of_timedelta: Annotated[ + List[timedelta], Field(), WithJsonSchema({"extra": "data"}) + ] + timedelta_short_sequence: ShortSequence[List[timedelta]] + + def _check_instance(self): + _assert_timedelta_validity(self.timedelta_field) + _assert_timedelta_validity(self.timedelta_field_assigned_field) + _assert_timedelta_validity(self.timedelta_field_with_default) + _assert_timedelta_validity(self.annotated_timedelta) + assert isinstance(self.annotated_list_of_timedelta, list) + for td in self.annotated_list_of_timedelta: + _assert_timedelta_validity(td) + assert isinstance(self.timedelta_short_sequence, list) + for td in self.timedelta_short_sequence: + _assert_timedelta_validity(td) + assert self.annotated_timedelta == timedelta(1, 2, 3, 4, 5, 6, 7) + assert self.annotated_list_of_timedelta == [ + timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta(2, 3, 4, 5, 6, 7, 8), + ] + + +PydanticModels = Union[ + PydanticModel, + PydanticDatetimeModel, + PydanticDateModel, + PydanticTimedeltaModel, +] + + def _assert_datetime_validity(dt: datetime): assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) @@ -103,6 +143,11 @@ def _assert_date_validity(d: date): assert issubclass(d.__class__, date) +def _assert_timedelta_validity(td: timedelta): + assert isinstance(td, timedelta) + assert issubclass(td.__class__, timedelta) + + def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: return [ PydanticModel( @@ -114,9 +159,7 @@ def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: ] -def make_heterogenous_list_of_pydantic_objects() -> ( - List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]] -): +def make_heterogenous_list_of_pydantic_objects() -> List[PydanticModels]: return [ PydanticModel( ip_field=IPv4Address("127.0.0.1"), @@ -144,6 +187,19 @@ def make_heterogenous_list_of_pydantic_objects() -> ( annotated_list_of_date=[date(2000, 1, 2), date(2001, 11, 12)], date_short_sequence=[date(2000, 1, 2), date(2001, 11, 12)], ), + PydanticTimedeltaModel( + timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta_field_assigned_field=timedelta(1, 2, 3, 4, 5, 6, 7), + annotated_timedelta=timedelta(1, 2, 3, 4, 5, 6, 7), + annotated_list_of_timedelta=[ + timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta(2, 3, 4, 5, 6, 7, 8), + ], + timedelta_short_sequence=[ + timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta(2, 3, 4, 5, 6, 7, 8), + ], + ), ] @@ -156,8 +212,8 @@ async def homogeneous_list_of_pydantic_models_activity( @activity.defn async def heterogeneous_list_of_pydantic_models_activity( - models: List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], -) -> List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]]: + models: List[PydanticModels], +) -> List[PydanticModels]: return models @@ -175,10 +231,7 @@ async def run(self, 
models: List[PydanticModel]) -> List[PydanticModel]: @workflow.defn class HeterogenousListOfPydanticObjectsWorkflow: @workflow.run - async def run( - self, - models: List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], - ) -> List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]]: + async def run(self, models: List[PydanticModels]) -> List[PydanticModels]: return await workflow.execute_activity( heterogeneous_list_of_pydantic_models_activity, models, @@ -248,11 +301,11 @@ async def run( self, input: Tuple[ List[MyDataClass], - List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], + List[PydanticModels], ], ) -> Tuple[ List[MyDataClass], - List[Union[PydanticModel, PydanticDatetimeModel, PydanticDateModel]], + List[PydanticModels], ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( From c31569b97190bcdee5d9dde773721c086d6e4c7f Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:31:04 -0500 Subject: [PATCH 32/96] test union field --- tests/contrib/test_pydantic.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index c4a17e6c7..9cbcf5e04 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -24,6 +24,7 @@ class PydanticModel(BaseModel): List[str], Field(), WithJsonSchema({"extra": "data"}) ] str_short_sequence: ShortSequence[List[str]] + union_field: Union[int, str] def _check_instance(self): assert isinstance(self.ip_field, IPv4Address) @@ -33,6 +34,8 @@ def _check_instance(self): assert isinstance(self.str_short_sequence, list) assert self.annotated_list_of_str == ["my-string-1", "my-string-2"] assert self.str_short_sequence == ["my-string-1", "my-string-2"] + assert isinstance(self.union_field, str) + assert self.union_field == "my-string" class PydanticDatetimeModel(BaseModel): @@ -155,6 +158,7 @@ def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], + union_field="my-string", ), ] @@ -166,6 +170,7 @@ def make_heterogenous_list_of_pydantic_objects() -> List[PydanticModels]: string_field_assigned_field="my-string", annotated_list_of_str=["my-string-1", "my-string-2"], str_short_sequence=["my-string-1", "my-string-2"], + union_field="my-string", ), PydanticDatetimeModel( datetime_field=datetime(2000, 1, 2, 3, 4, 5), From 3f44ffa1c2dbf7ad4477dc36af875b9d1b2b4bc4 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:38:26 -0500 Subject: [PATCH 33/96] test type hints --- tests/contrib/test_pydantic.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 9cbcf5e04..4994675b6 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,7 +2,7 @@ import uuid from datetime import date, datetime, timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union +from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union, get_type_hints from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -37,6 +37,20 @@ def _check_instance(self): assert isinstance(self.union_field, str) assert self.union_field == "my-string" + assert get_type_hints(self) == { + "ip_field": IPv4Address, + 
"string_field_assigned_field": str, + "string_field_with_default": str, + "annotated_list_of_str": List[str], + "str_short_sequence": List[str], + # TODO: why not + # "annotated_list_of_str": Annotated[ + # List[str], Field(), WithJsonSchema({"extra": "data"}) + # ], + # "str_short_sequence": ShortSequence[List[str]], + "union_field": Union[int, str], + } + class PydanticDatetimeModel(BaseModel): datetime_field: datetime From 19282749e789291089441995d375351e4633c517 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 20:22:56 -0500 Subject: [PATCH 34/96] Test dunder methods --- .../workflow_sandbox/test_restrictions.py | 41 ++++++++++++++++++- 1 file changed, 40 insertions(+), 1 deletion(-) diff --git a/tests/worker/workflow_sandbox/test_restrictions.py b/tests/worker/workflow_sandbox/test_restrictions.py index 90d25454c..99ef49378 100644 --- a/tests/worker/workflow_sandbox/test_restrictions.py +++ b/tests/worker/workflow_sandbox/test_restrictions.py @@ -1,5 +1,6 @@ from __future__ import annotations +import pathlib import sys from dataclasses import dataclass from typing import ClassVar, Dict, Optional @@ -28,7 +29,7 @@ def test_workflow_sandbox_stdlib_module_names(): if len(code_lines[-1]) > 80: code_lines.append("") code_lines[-1] += mod_name - code = f'_stdlib_module_names = (\n "' + '"\n "'.join(code_lines) + '"\n)' + code = '_stdlib_module_names = (\n "' + '"\n "'.join(code_lines) + '"\n)' # TODO(cretz): Point releases may add modules :-( assert ( actual_names == _stdlib_module_names @@ -56,6 +57,44 @@ class RestrictableObject: RestrictableObject.qux = RestrictableObject(foo=RestrictableObject(bar=70), bar=80) +class RestrictableClass: + def __str__(self): + return "__str__" + + def __repr__(self): + return "__repr__" + + def __format__(self, __format_spec: str) -> str: + return "__format__" + + +def test_restricted_proxy_dunder_methods(): + restricted_class = _RestrictedProxy( + "RestrictableClass", + RestrictableClass, + RestrictionContext(), + SandboxMatcher(), + ) + restricted_obj = restricted_class() + assert type(restricted_obj) is _RestrictedProxy + assert str(restricted_obj) == "__str__" + assert repr(restricted_obj) == "__repr__" + assert format(restricted_obj, "") == "__format__" + assert f"{restricted_obj}" == "__format__" + + restricted_path = _RestrictedProxy( + "Path", + pathlib.Path, + RestrictionContext(), + SandboxMatcher(), + ) + assert isinstance(format(restricted_path, ""), str) + restricted_path_obj = restricted_path("test/path") + assert type(restricted_path_obj) is _RestrictedProxy + assert format(restricted_path_obj, "") == "test/path" + assert f"{restricted_path_obj}" == "test/path" + + def test_workflow_sandbox_restricted_proxy(): obj_class = _RestrictedProxy( "RestrictableObject", From 11ee00753999336f218e202c74aad30283dc3e58 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 16:59:01 -0500 Subject: [PATCH 35/96] Organize tests --- tests/contrib/test_pydantic.py | 270 +++++++++++++++++++++++++++++++-- 1 file changed, 254 insertions(+), 16 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 4994675b6..e1d8b258c 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,7 +2,20 @@ import uuid from datetime import date, datetime, timedelta from ipaddress import IPv4Address -from typing import Annotated, Any, List, Sequence, Tuple, TypeVar, Union, get_type_hints +from typing import ( + Annotated, + Any, + Dict, + Generic, + List, + Optional, + Sequence, + Set, + 
Tuple, + TypeVar, + Union, + get_type_hints, +) from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema @@ -16,6 +29,205 @@ ShortSequence = Annotated[SequenceType, Len(max_length=2)] +class BasicTypesModel(BaseModel): + int_field: int + float_field: float + str_field: str + bool_field: bool + bytes_field: bytes + none_field: None + + def _check_instance(self): + assert isinstance(self.int_field, int) + assert isinstance(self.float_field, float) + assert isinstance(self.str_field, str) + assert isinstance(self.bool_field, bool) + assert isinstance(self.bytes_field, bytes) + assert self.none_field is None + assert self.int_field == 42 + assert self.float_field == 3.14 + assert self.str_field == "hello" + assert self.bool_field is True + assert self.bytes_field == b"world" + + +def make_basic_types_object() -> BasicTypesModel: + return BasicTypesModel( + int_field=42, + float_field=3.14, + str_field="hello", + bool_field=True, + bytes_field=b"world", + none_field=None, + ) + + +class ComplexTypesModel(BaseModel): + list_field: List[str] + dict_field: Dict[str, int] + set_field: Set[int] + tuple_field: Tuple[str, int] + union_field: Union[str, int] + optional_field: Optional[str] + + def _check_instance(self): + assert isinstance(self.list_field, list) + assert isinstance(self.dict_field, dict) + assert isinstance(self.set_field, set) + assert isinstance(self.tuple_field, tuple) + assert isinstance(self.union_field, str) + assert isinstance(self.optional_field, str) + assert self.list_field == ["a", "b", "c"] + assert self.dict_field == {"x": 1, "y": 2} + assert self.set_field == {1, 2, 3} + assert self.tuple_field == ("hello", 42) + assert self.union_field == "string_or_int" + assert self.optional_field == "present" + + +def make_complex_types_object() -> ComplexTypesModel: + return ComplexTypesModel( + list_field=["a", "b", "c"], + dict_field={"x": 1, "y": 2}, + set_field={1, 2, 3}, + tuple_field=("hello", 42), + union_field="string_or_int", + optional_field="present", + ) + + +class SpecialTypesModel(BaseModel): + datetime_field: datetime + date_field: date + timedelta_field: timedelta + # path_field: Path + uuid_field: uuid.UUID + ip_field: IPv4Address + + def _check_instance(self): + assert isinstance(self.datetime_field, datetime) + assert isinstance(self.date_field, date) + assert isinstance(self.timedelta_field, timedelta) + # assert isinstance(self.path_field, Path) + assert isinstance(self.uuid_field, uuid.UUID) + assert isinstance(self.ip_field, IPv4Address) + assert self.datetime_field == datetime(2000, 1, 2, 3, 4, 5) + assert self.date_field == date(2000, 1, 2) + assert self.timedelta_field == timedelta(days=1, hours=2) + # assert self.path_field == Path("test/path") + assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") + assert self.ip_field == IPv4Address("127.0.0.1") + + +def make_special_types_object() -> SpecialTypesModel: + return SpecialTypesModel( + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + date_field=date(2000, 1, 2), + timedelta_field=timedelta(days=1, hours=2), + # path_field=Path("test/path"), + uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), + ip_field=IPv4Address("127.0.0.1"), + ) + + +class ChildModel(BaseModel): + name: str + value: int + + +class ParentModel(BaseModel): + child: ChildModel + children: List[ChildModel] + + def _check_instance(self): + assert isinstance(self.child, ChildModel) + assert isinstance(self.children, list) + assert all(isinstance(child, ChildModel) for child in 
self.children) + assert self.child.name == "child1" + assert self.child.value == 1 + assert len(self.children) == 2 + assert self.children[0].name == "child2" + assert self.children[0].value == 2 + assert self.children[1].name == "child3" + assert self.children[1].value == 3 + + +def make_nested_object() -> ParentModel: + return ParentModel( + child=ChildModel(name="child1", value=1), + children=[ + ChildModel(name="child2", value=2), + ChildModel(name="child3", value=3), + ], + ) + + +class FieldFeaturesModel(BaseModel): + field_with_default: str = "default" + field_with_factory: datetime = Field( + default_factory=lambda: datetime(2000, 1, 2, 3, 4, 5) + ) + field_with_constraints: int = Field(gt=0, lt=100) + field_with_alias: str = Field(alias="different_name") + + def _check_instance(self): + assert isinstance(self.field_with_default, str) + assert isinstance(self.field_with_factory, datetime) + assert isinstance(self.field_with_constraints, int) + assert isinstance(self.field_with_alias, str) + assert self.field_with_default == "default" + assert 0 < self.field_with_constraints < 100 + assert self.field_with_alias == "aliased_value" + + +def make_field_features_object() -> FieldFeaturesModel: + return FieldFeaturesModel( + field_with_constraints=50, + different_name="aliased_value", + ) + + +class AnnotatedFieldsModel(BaseModel): + max_length_str: Annotated[str, Len(max_length=10)] + custom_json: Annotated[Dict[str, Any], WithJsonSchema({"extra": "data"})] + + def _check_instance(self): + assert isinstance(self.max_length_str, str) + assert isinstance(self.custom_json, dict) + assert len(self.max_length_str) <= 10 + assert self.max_length_str == "short" + assert self.custom_json == {"key": "value"} + + +def make_annotated_fields_object() -> AnnotatedFieldsModel: + return AnnotatedFieldsModel( + max_length_str="short", + custom_json={"key": "value"}, + ) + + +T = TypeVar("T") + + +class GenericModel(BaseModel, Generic[T]): + value: T + values: List[T] + + def _check_instance(self): + assert isinstance(self.value, str) + assert isinstance(self.values, list) + assert all(isinstance(v, str) for v in self.values) + assert self.value == "single" + assert self.values == ["multiple", "values"] + + +def make_generic_string_object() -> GenericModel[str]: + return GenericModel[str]( + value="single", + values=["multiple", "values"], + ) + + class PydanticModel(BaseModel): ip_field: IPv4Address string_field_assigned_field: str = Field() @@ -143,6 +355,13 @@ def _check_instance(self): PydanticModels = Union[ + BasicTypesModel, + ComplexTypesModel, + SpecialTypesModel, + ParentModel, + FieldFeaturesModel, + AnnotatedFieldsModel, + GenericModel, PydanticModel, PydanticDatetimeModel, PydanticDateModel, @@ -166,7 +385,7 @@ def _assert_timedelta_validity(td: timedelta): def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: - return [ + objects = [ PydanticModel( ip_field=IPv4Address("127.0.0.1"), string_field_assigned_field="my-string", @@ -175,10 +394,20 @@ def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: union_field="my-string", ), ] - - -def make_heterogenous_list_of_pydantic_objects() -> List[PydanticModels]: - return [ + for o in objects: + o._check_instance() + return objects + + +def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: + objects = [ + make_basic_types_object(), + make_complex_types_object(), + make_special_types_object(), + make_nested_object(), + make_field_features_object(), + make_annotated_fields_object(), + 
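        # (Each factory above returns an already-valid instance; every object in this
        #  list is run through _check_instance before the factory returns, and the
        #  tests repeat the same checks after the objects round-trip through an
        #  activity typed with the List[PydanticModels] union.)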
make_generic_string_object(), PydanticModel( ip_field=IPv4Address("127.0.0.1"), string_field_assigned_field="my-string", @@ -220,6 +449,9 @@ def make_heterogenous_list_of_pydantic_objects() -> List[PydanticModels]: ], ), ] + for o in objects: + o._check_instance() + return objects @activity.defn @@ -237,7 +469,7 @@ async def heterogeneous_list_of_pydantic_models_activity( @workflow.defn -class HomogenousListOfPydanticObjectsWorkflow: +class HomogeneousListOfPydanticObjectsWorkflow: @workflow.run async def run(self, models: List[PydanticModel]) -> List[PydanticModel]: return await workflow.execute_activity( @@ -248,7 +480,7 @@ async def run(self, models: List[PydanticModel]) -> List[PydanticModel]: @workflow.defn -class HeterogenousListOfPydanticObjectsWorkflow: +class HeterogeneousListOfPydanticObjectsWorkflow: @workflow.run async def run(self, models: List[PydanticModels]) -> List[PydanticModels]: return await workflow.execute_activity( @@ -269,39 +501,43 @@ async def test_homogeneous_list_of_pydantic_objects(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[HomogenousListOfPydanticObjectsWorkflow], + workflows=[HomogeneousListOfPydanticObjectsWorkflow], activities=[homogeneous_list_of_pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( - HomogenousListOfPydanticObjectsWorkflow.run, + HomogeneousListOfPydanticObjectsWorkflow.run, orig_pydantic_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, ) assert orig_pydantic_objects == round_tripped_pydantic_objects + for o in round_tripped_pydantic_objects: + o._check_instance() -async def test_heterogenous_list_of_pydantic_objects(client: Client): +async def test_heterogeneous_list_of_pydantic_objects(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_pydantic_objects = make_heterogenous_list_of_pydantic_objects() + orig_pydantic_objects = make_heterogeneous_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, - workflows=[HeterogenousListOfPydanticObjectsWorkflow], + workflows=[HeterogeneousListOfPydanticObjectsWorkflow], activities=[heterogeneous_list_of_pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( - HeterogenousListOfPydanticObjectsWorkflow.run, + HeterogeneousListOfPydanticObjectsWorkflow.run, orig_pydantic_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, ) assert orig_pydantic_objects == round_tripped_pydantic_objects + for o in round_tripped_pydantic_objects: + o._check_instance() @dataclasses.dataclass @@ -342,7 +578,7 @@ async def test_mixed_collection_types(client: Client): task_queue_name = str(uuid.uuid4()) orig_dataclass_objects = make_dataclass_objects() - orig_pydantic_objects = make_heterogenous_list_of_pydantic_objects() + orig_pydantic_objects = make_heterogeneous_list_of_pydantic_objects() async with Worker( client, @@ -361,13 +597,15 @@ async def test_mixed_collection_types(client: Client): ) assert orig_dataclass_objects == round_tripped_dataclass_objects assert orig_pydantic_objects == round_tripped_pydantic_objects + for o in round_tripped_pydantic_objects: + o._check_instance() @workflow.defn class PydanticModelUsageWorkflow: @workflow.run async def run(self) -> None: - for o in make_heterogenous_list_of_pydantic_objects(): + for o in make_heterogeneous_list_of_pydantic_objects(): o._check_instance() From 
dbba4ce72d7b04e6e3e62524f98eb960baced6ff Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 20:28:53 -0500 Subject: [PATCH 36/96] Test pathlib.Path --- tests/contrib/test_pydantic.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index e1d8b258c..250a44f22 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -2,6 +2,7 @@ import uuid from datetime import date, datetime, timedelta from ipaddress import IPv4Address +from pathlib import Path from typing import ( Annotated, Any, @@ -100,7 +101,7 @@ class SpecialTypesModel(BaseModel): datetime_field: datetime date_field: date timedelta_field: timedelta - # path_field: Path + path_field: Path uuid_field: uuid.UUID ip_field: IPv4Address @@ -108,13 +109,13 @@ def _check_instance(self): assert isinstance(self.datetime_field, datetime) assert isinstance(self.date_field, date) assert isinstance(self.timedelta_field, timedelta) - # assert isinstance(self.path_field, Path) + assert isinstance(self.path_field, Path) assert isinstance(self.uuid_field, uuid.UUID) assert isinstance(self.ip_field, IPv4Address) assert self.datetime_field == datetime(2000, 1, 2, 3, 4, 5) assert self.date_field == date(2000, 1, 2) assert self.timedelta_field == timedelta(days=1, hours=2) - # assert self.path_field == Path("test/path") + assert self.path_field == Path("test/path") assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") assert self.ip_field == IPv4Address("127.0.0.1") @@ -124,7 +125,7 @@ def make_special_types_object() -> SpecialTypesModel: datetime_field=datetime(2000, 1, 2, 3, 4, 5), date_field=date(2000, 1, 2), timedelta_field=timedelta(days=1, hours=2), - # path_field=Path("test/path"), + path_field=Path("test/path"), uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), ip_field=IPv4Address("127.0.0.1"), ) From c92486d8a0535a394286ab0ec29b161b74d2cb17 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 20:25:32 -0500 Subject: [PATCH 37/96] Fix pathlib.Path usage --- temporalio/worker/workflow_sandbox/_restrictions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 88b595386..71098df6e 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -1065,7 +1065,7 @@ def __get_pydantic_core_schema__( ) __str__ = _RestrictedProxyLookup(str) # type: ignore __bytes__ = _RestrictedProxyLookup(bytes) - __format__ = _RestrictedProxyLookup() # type: ignore + __format__ = _RestrictedProxyLookup(format) # type: ignore __lt__ = _RestrictedProxyLookup(operator.lt) __le__ = _RestrictedProxyLookup(operator.le) __eq__ = _RestrictedProxyLookup(operator.eq) # type: ignore From f52c2e8a6062606009dab10a08239b53bb17d6a4 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 21:11:12 -0500 Subject: [PATCH 38/96] Delete redundant fix --- temporalio/contrib/pydantic.py | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index eb307c7fc..02c6dcd78 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -25,7 +25,6 @@ # pydantic v1 from pydantic.json import pydantic_encoder as to_jsonable_python # type: ignore -import temporalio.workflow from temporalio.converter import ( 
AdvancedJSONEncoder, CompositePayloadConverter, @@ -34,11 +33,9 @@ JSONPlainPayloadConverter, JSONTypeConverter, ) -from temporalio.worker.workflow_sandbox._restrictions import RestrictionContext # Note that in addition to the implementation in this module, _RestrictedProxy -# implements __get_pydantic_core_schema__ so that pydantic unwraps proxied types -# when determining the schema. +# implements __get_pydantic_core_schema__ so that pydantic unwraps proxied types. class PydanticModelTypeConverter(JSONTypeConverter): @@ -53,15 +50,6 @@ def to_typed_value(self, hint: Type, value: Any) -> Any: raise TypeError( f"Cannot convert to {model}, value is {type(value)} not dict" ) - if temporalio.workflow.unsafe.in_sandbox(): - # Unwrap proxied model field types so that Pydantic can call their constructors - model = pydantic.create_model( - model.__name__, - **{ # type: ignore - name: (RestrictionContext.unwrap_if_proxied(f.annotation), f) - for name, f in model.model_fields.items() - }, - ) if hasattr(model, "model_validate"): return model.model_validate(value) elif hasattr(model, "parse_obj"): From 298ee64753110916bbfa7039ea1a4e4001346290 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 6 Feb 2025 21:16:24 -0500 Subject: [PATCH 39/96] Remove redundant isinstance check --- temporalio/worker/workflow_sandbox/_restrictions.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 71098df6e..6b23c451e 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -961,8 +961,7 @@ def _is_restrictable(v: Any) -> bool: str, bytes, bytearray, - datetime.date, - datetime.datetime, + datetime.date, # from which datetime.datetime inherits ), ) From d2a956a819a2e82a3dee3511da422bb58eb64195 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 07:11:27 -0500 Subject: [PATCH 40/96] Deduplicate tests --- tests/contrib/test_pydantic.py | 143 ++++++++++++--------------------- 1 file changed, 52 insertions(+), 91 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 250a44f22..be467f65f 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -15,7 +15,6 @@ Tuple, TypeVar, Union, - get_type_hints, ) from annotated_types import Len @@ -229,42 +228,6 @@ def make_generic_string_object() -> GenericModel[str]: ) -class PydanticModel(BaseModel): - ip_field: IPv4Address - string_field_assigned_field: str = Field() - string_field_with_default: str = Field(default_factory=lambda: "my-string") - annotated_list_of_str: Annotated[ - List[str], Field(), WithJsonSchema({"extra": "data"}) - ] - str_short_sequence: ShortSequence[List[str]] - union_field: Union[int, str] - - def _check_instance(self): - assert isinstance(self.ip_field, IPv4Address) - assert isinstance(self.string_field_assigned_field, str) - assert isinstance(self.string_field_with_default, str) - assert isinstance(self.annotated_list_of_str, list) - assert isinstance(self.str_short_sequence, list) - assert self.annotated_list_of_str == ["my-string-1", "my-string-2"] - assert self.str_short_sequence == ["my-string-1", "my-string-2"] - assert isinstance(self.union_field, str) - assert self.union_field == "my-string" - - assert get_type_hints(self) == { - "ip_field": IPv4Address, - "string_field_assigned_field": str, - "string_field_with_default": str, - "annotated_list_of_str": List[str], - 
"str_short_sequence": List[str], - # TODO: why not - # "annotated_list_of_str": Annotated[ - # List[str], Field(), WithJsonSchema({"extra": "data"}) - # ], - # "str_short_sequence": ShortSequence[List[str]], - "union_field": Union[int, str], - } - - class PydanticDatetimeModel(BaseModel): datetime_field: datetime datetime_field_assigned_field: datetime = Field() @@ -295,6 +258,22 @@ def _check_instance(self): ] +def make_pydantic_datetime_object() -> PydanticDatetimeModel: + return PydanticDatetimeModel( + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), + annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), + annotated_list_of_datetime=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2001, 11, 12, 13, 14, 15), + ], + datetime_short_sequence=[ + datetime(2000, 1, 2, 3, 4, 5), + datetime(2001, 11, 12, 13, 14, 15), + ], + ) + + class PydanticDateModel(BaseModel): date_field: date date_field_assigned_field: date = Field() @@ -323,6 +302,16 @@ def _check_instance(self): ] +def make_pydantic_date_object() -> PydanticDateModel: + return PydanticDateModel( + date_field=date(2000, 1, 2), + date_field_assigned_field=date(2000, 1, 2), + annotated_date=date(2000, 1, 2), + annotated_list_of_date=[date(2000, 1, 2), date(2001, 11, 12)], + date_short_sequence=[date(2000, 1, 2), date(2001, 11, 12)], + ) + + class PydanticTimedeltaModel(BaseModel): timedelta_field: timedelta timedelta_field_assigned_field: timedelta = Field() @@ -355,6 +344,22 @@ def _check_instance(self): ] +def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: + return PydanticTimedeltaModel( + timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta_field_assigned_field=timedelta(1, 2, 3, 4, 5, 6, 7), + annotated_timedelta=timedelta(1, 2, 3, 4, 5, 6, 7), + annotated_list_of_timedelta=[ + timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta(2, 3, 4, 5, 6, 7, 8), + ], + timedelta_short_sequence=[ + timedelta(1, 2, 3, 4, 5, 6, 7), + timedelta(2, 3, 4, 5, 6, 7, 8), + ], + ) + + PydanticModels = Union[ BasicTypesModel, ComplexTypesModel, @@ -363,7 +368,6 @@ def _check_instance(self): FieldFeaturesModel, AnnotatedFieldsModel, GenericModel, - PydanticModel, PydanticDatetimeModel, PydanticDateModel, PydanticTimedeltaModel, @@ -385,16 +389,8 @@ def _assert_timedelta_validity(td: timedelta): assert issubclass(td.__class__, timedelta) -def make_homogeneous_list_of_pydantic_objects() -> List[PydanticModel]: - objects = [ - PydanticModel( - ip_field=IPv4Address("127.0.0.1"), - string_field_assigned_field="my-string", - annotated_list_of_str=["my-string-1", "my-string-2"], - str_short_sequence=["my-string-1", "my-string-2"], - union_field="my-string", - ), - ] +def make_homogeneous_list_of_pydantic_objects() -> List[PydanticDatetimeModel]: + objects = [make_pydantic_datetime_object()] for o in objects: o._check_instance() return objects @@ -409,46 +405,9 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: make_field_features_object(), make_annotated_fields_object(), make_generic_string_object(), - PydanticModel( - ip_field=IPv4Address("127.0.0.1"), - string_field_assigned_field="my-string", - annotated_list_of_str=["my-string-1", "my-string-2"], - str_short_sequence=["my-string-1", "my-string-2"], - union_field="my-string", - ), - PydanticDatetimeModel( - datetime_field=datetime(2000, 1, 2, 3, 4, 5), - datetime_field_assigned_field=datetime(2000, 1, 2, 3, 4, 5), - annotated_datetime=datetime(2000, 1, 2, 3, 4, 5), - annotated_list_of_datetime=[ - datetime(2000, 1, 2, 
3, 4, 5), - datetime(2001, 11, 12, 13, 14, 15), - ], - datetime_short_sequence=[ - datetime(2000, 1, 2, 3, 4, 5), - datetime(2001, 11, 12, 13, 14, 15), - ], - ), - PydanticDateModel( - date_field=date(2000, 1, 2), - date_field_assigned_field=date(2000, 1, 2), - annotated_date=date(2000, 1, 2), - annotated_list_of_date=[date(2000, 1, 2), date(2001, 11, 12)], - date_short_sequence=[date(2000, 1, 2), date(2001, 11, 12)], - ), - PydanticTimedeltaModel( - timedelta_field=timedelta(1, 2, 3, 4, 5, 6, 7), - timedelta_field_assigned_field=timedelta(1, 2, 3, 4, 5, 6, 7), - annotated_timedelta=timedelta(1, 2, 3, 4, 5, 6, 7), - annotated_list_of_timedelta=[ - timedelta(1, 2, 3, 4, 5, 6, 7), - timedelta(2, 3, 4, 5, 6, 7, 8), - ], - timedelta_short_sequence=[ - timedelta(1, 2, 3, 4, 5, 6, 7), - timedelta(2, 3, 4, 5, 6, 7, 8), - ], - ), + make_pydantic_datetime_object(), + make_pydantic_date_object(), + make_pydantic_timedelta_object(), ] for o in objects: o._check_instance() @@ -457,8 +416,8 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: @activity.defn async def homogeneous_list_of_pydantic_models_activity( - models: List[PydanticModel], -) -> List[PydanticModel]: + models: List[PydanticDatetimeModel], +) -> List[PydanticDatetimeModel]: return models @@ -472,7 +431,9 @@ async def heterogeneous_list_of_pydantic_models_activity( @workflow.defn class HomogeneousListOfPydanticObjectsWorkflow: @workflow.run - async def run(self, models: List[PydanticModel]) -> List[PydanticModel]: + async def run( + self, models: List[PydanticDatetimeModel] + ) -> List[PydanticDatetimeModel]: return await workflow.execute_activity( homogeneous_list_of_pydantic_models_activity, models, From 725792f9b32bc8e25e3dd22b7979a48782f66aed Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 07:40:09 -0500 Subject: [PATCH 41/96] Clean up --- tests/contrib/test_pydantic.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index be467f65f..326edf4e4 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -37,7 +37,7 @@ class BasicTypesModel(BaseModel): bytes_field: bytes none_field: None - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.int_field, int) assert isinstance(self.float_field, float) assert isinstance(self.str_field, str) @@ -70,7 +70,7 @@ class ComplexTypesModel(BaseModel): union_field: Union[str, int] optional_field: Optional[str] - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.list_field, list) assert isinstance(self.dict_field, dict) assert isinstance(self.set_field, set) @@ -104,7 +104,7 @@ class SpecialTypesModel(BaseModel): uuid_field: uuid.UUID ip_field: IPv4Address - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.datetime_field, datetime) assert isinstance(self.date_field, date) assert isinstance(self.timedelta_field, timedelta) @@ -139,7 +139,7 @@ class ParentModel(BaseModel): child: ChildModel children: List[ChildModel] - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.child, ChildModel) assert isinstance(self.children, list) assert all(isinstance(child, ChildModel) for child in self.children) @@ -170,7 +170,7 @@ class FieldFeaturesModel(BaseModel): field_with_constraints: int = Field(gt=0, lt=100) field_with_alias: str = Field(alias="different_name") - def _check_instance(self): + def 
_check_instance(self) -> None: assert isinstance(self.field_with_default, str) assert isinstance(self.field_with_factory, datetime) assert isinstance(self.field_with_constraints, int) @@ -191,7 +191,7 @@ class AnnotatedFieldsModel(BaseModel): max_length_str: Annotated[str, Len(max_length=10)] custom_json: Annotated[Dict[str, Any], WithJsonSchema({"extra": "data"})] - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.max_length_str, str) assert isinstance(self.custom_json, dict) assert len(self.max_length_str) <= 10 @@ -213,7 +213,7 @@ class GenericModel(BaseModel, Generic[T]): value: T values: List[T] - def _check_instance(self): + def _check_instance(self) -> None: assert isinstance(self.value, str) assert isinstance(self.values, list) assert all(isinstance(v, str) for v in self.values) @@ -367,7 +367,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ParentModel, FieldFeaturesModel, AnnotatedFieldsModel, - GenericModel, + GenericModel[Any], PydanticDatetimeModel, PydanticDateModel, PydanticTimedeltaModel, @@ -410,8 +410,8 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: make_pydantic_timedelta_object(), ] for o in objects: - o._check_instance() - return objects + o._check_instance() # type: ignore + return objects # type: ignore @activity.defn From 7032a70194f83300f3a5575a0667d90ee9871fa1 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 07:58:55 -0500 Subject: [PATCH 42/96] Expand --- tests/contrib/test_pydantic.py | 150 ++++++++++++++++++++++++++++++--- 1 file changed, 137 insertions(+), 13 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 326edf4e4..1662612e3 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,15 +1,24 @@ +import array +import collections import dataclasses +import decimal +import fractions +import re import uuid from datetime import date, datetime, timedelta +from enum import Enum, IntEnum from ipaddress import IPv4Address from pathlib import Path from typing import ( Annotated, Any, + Callable, Dict, Generic, + Hashable, List, Optional, + Pattern, Sequence, Set, Tuple, @@ -29,36 +38,152 @@ ShortSequence = Annotated[SequenceType, Len(max_length=2)] -class BasicTypesModel(BaseModel): +class StandardTypesModel(BaseModel): + # Boolean + bool_field: bool + + # Numbers int_field: int float_field: float + decimal_field: decimal.Decimal + complex_field: complex + fraction_field: fractions.Fraction + + # Strings and Bytes str_field: str - bool_field: bool bytes_field: bytes + + # None none_field: None + # Enums + str_enum_field: Enum + int_enum_field: IntEnum + + # Collections + list_field: list + tuple_field: tuple + set_field: set + frozenset_field: frozenset + deque_field: collections.deque + array_field: array.array + + # Mappings + dict_field: dict + defaultdict_field: collections.defaultdict + counter_field: collections.Counter + + # Other Types + pattern_field: Pattern + hashable_field: Hashable + any_field: Any + callable_field: Callable + def _check_instance(self) -> None: - assert isinstance(self.int_field, int) - assert isinstance(self.float_field, float) - assert isinstance(self.str_field, str) + # Boolean checks assert isinstance(self.bool_field, bool) - assert isinstance(self.bytes_field, bytes) - assert self.none_field is None + assert self.bool_field is True + + # Number checks + assert isinstance(self.int_field, int) assert self.int_field == 42 + assert isinstance(self.float_field, 
float) assert self.float_field == 3.14 + assert isinstance(self.decimal_field, decimal.Decimal) + assert self.decimal_field == decimal.Decimal("3.14") + assert isinstance(self.complex_field, complex) + assert self.complex_field == complex(1, 2) + assert isinstance(self.fraction_field, fractions.Fraction) + assert self.fraction_field == fractions.Fraction(22, 7) + + # String and Bytes checks + assert isinstance(self.str_field, str) assert self.str_field == "hello" - assert self.bool_field is True + assert isinstance(self.bytes_field, bytes) assert self.bytes_field == b"world" + # None check + assert self.none_field is None + + # Enum checks + assert isinstance(self.str_enum_field, Enum) + assert isinstance(self.int_enum_field, IntEnum) + + # Collection checks + assert isinstance(self.list_field, list) + assert self.list_field == [1, 2, 3] + assert isinstance(self.tuple_field, tuple) + assert self.tuple_field == (1, 2, 3) + assert isinstance(self.set_field, set) + assert self.set_field == {1, 2, 3} + assert isinstance(self.frozenset_field, frozenset) + assert self.frozenset_field == frozenset([1, 2, 3]) + assert isinstance(self.deque_field, collections.deque) + assert list(self.deque_field) == [1, 2, 3] + assert isinstance(self.array_field, array.array) + assert list(self.array_field) == [1, 2, 3] + + # Mapping checks + assert isinstance(self.dict_field, dict) + assert self.dict_field == {"a": 1, "b": 2} + assert isinstance(self.defaultdict_field, collections.defaultdict) + assert dict(self.defaultdict_field) == {"a": 1, "b": 2} + assert isinstance(self.counter_field, collections.Counter) + assert dict(self.counter_field) == {"a": 1, "b": 2} + + # Other type checks + assert isinstance(self.pattern_field, Pattern) + assert self.pattern_field.pattern == r"\d+" + assert isinstance(self.hashable_field, Hashable) + assert self.hashable_field == "test" + assert self.any_field == "anything goes" + assert callable(self.callable_field) -def make_basic_types_object() -> BasicTypesModel: - return BasicTypesModel( + +class FruitEnum(str, Enum): + apple = "apple" + banana = "banana" + + +class NumberEnum(IntEnum): + one = 1 + two = 2 + + +def make_standard_types_object() -> StandardTypesModel: + return StandardTypesModel( + # Boolean + bool_field=True, + # Numbers int_field=42, float_field=3.14, + decimal_field=decimal.Decimal("3.14"), + complex_field=complex(1, 2), + fraction_field=fractions.Fraction(22, 7), + # Strings and Bytes str_field="hello", - bool_field=True, bytes_field=b"world", + # None none_field=None, + # Enums + str_enum_field=FruitEnum.apple, + int_enum_field=NumberEnum.one, + # Collections + list_field=[1, 2, 3], + tuple_field=(1, 2, 3), + set_field={1, 2, 3}, + frozenset_field=frozenset([1, 2, 3]), + deque_field=collections.deque([1, 2, 3]), + array_field=array.array("i", [1, 2, 3]), + # Mappings + dict_field={"a": 1, "b": 2}, + defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), + counter_field=collections.Counter({"a": 1, "b": 2}), + # Other Types + pattern_field=re.compile(r"\d+"), + hashable_field="test", + any_field="anything goes", + callable_field=lambda x: x, ) @@ -361,7 +486,6 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: PydanticModels = Union[ - BasicTypesModel, ComplexTypesModel, SpecialTypesModel, ParentModel, @@ -398,7 +522,7 @@ def make_homogeneous_list_of_pydantic_objects() -> List[PydanticDatetimeModel]: def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: objects = [ - make_basic_types_object(), + 
make_standard_types_object(), make_complex_types_object(), make_special_types_object(), make_nested_object(), From 4005a6513ef4c0c29b58866d7d91dd192512e0e7 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 08:54:26 -0500 Subject: [PATCH 43/96] Clean up --- tests/contrib/test_pydantic.py | 55 ++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 25 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 1662612e3..df55727e5 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -38,6 +38,16 @@ ShortSequence = Annotated[SequenceType, Len(max_length=2)] +class FruitEnum(str, Enum): + apple = "apple" + banana = "banana" + + +class NumberEnum(IntEnum): + one = 1 + two = 2 + + class StandardTypesModel(BaseModel): # Boolean bool_field: bool @@ -57,8 +67,8 @@ class StandardTypesModel(BaseModel): none_field: None # Enums - str_enum_field: Enum - int_enum_field: IntEnum + str_enum_field: FruitEnum + int_enum_field: NumberEnum # Collections list_field: list @@ -140,16 +150,6 @@ def _check_instance(self) -> None: assert callable(self.callable_field) -class FruitEnum(str, Enum): - apple = "apple" - banana = "banana" - - -class NumberEnum(IntEnum): - one = 1 - two = 2 - - def make_standard_types_object() -> StandardTypesModel: return StandardTypesModel( # Boolean @@ -485,7 +485,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ) -PydanticModels = Union[ +HeterogeneousPydanticModels = Union[ ComplexTypesModel, SpecialTypesModel, ParentModel, @@ -498,6 +498,9 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ] +HomogeneousPydanticModels = StandardTypesModel + + def _assert_datetime_validity(dt: datetime): assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) @@ -513,14 +516,14 @@ def _assert_timedelta_validity(td: timedelta): assert issubclass(td.__class__, timedelta) -def make_homogeneous_list_of_pydantic_objects() -> List[PydanticDatetimeModel]: - objects = [make_pydantic_datetime_object()] +def make_homogeneous_list_of_pydantic_objects() -> List[HomogeneousPydanticModels]: + objects = [make_standard_types_object()] for o in objects: o._check_instance() return objects -def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: +def make_heterogeneous_list_of_pydantic_objects() -> List[HeterogeneousPydanticModels]: objects = [ make_standard_types_object(), make_complex_types_object(), @@ -540,15 +543,15 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[PydanticModels]: @activity.defn async def homogeneous_list_of_pydantic_models_activity( - models: List[PydanticDatetimeModel], -) -> List[PydanticDatetimeModel]: + models: List[HomogeneousPydanticModels], +) -> List[HomogeneousPydanticModels]: return models @activity.defn async def heterogeneous_list_of_pydantic_models_activity( - models: List[PydanticModels], -) -> List[PydanticModels]: + models: List[HeterogeneousPydanticModels], +) -> List[HeterogeneousPydanticModels]: return models @@ -556,8 +559,8 @@ async def heterogeneous_list_of_pydantic_models_activity( class HomogeneousListOfPydanticObjectsWorkflow: @workflow.run async def run( - self, models: List[PydanticDatetimeModel] - ) -> List[PydanticDatetimeModel]: + self, models: List[HomogeneousPydanticModels] + ) -> List[HomogeneousPydanticModels]: return await workflow.execute_activity( homogeneous_list_of_pydantic_models_activity, models, @@ -568,7 +571,9 @@ async def run( @workflow.defn class 
HeterogeneousListOfPydanticObjectsWorkflow: @workflow.run - async def run(self, models: List[PydanticModels]) -> List[PydanticModels]: + async def run( + self, models: List[HeterogeneousPydanticModels] + ) -> List[HeterogeneousPydanticModels]: return await workflow.execute_activity( heterogeneous_list_of_pydantic_models_activity, models, @@ -642,11 +647,11 @@ async def run( self, input: Tuple[ List[MyDataClass], - List[PydanticModels], + List[HeterogeneousPydanticModels], ], ) -> Tuple[ List[MyDataClass], - List[PydanticModels], + List[HeterogeneousPydanticModels], ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( From 815fa846ddc914330b612073cd596233f5b9707e Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 08:17:57 -0500 Subject: [PATCH 44/96] Disable field types that don't work --- tests/contrib/test_pydantic.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index df55727e5..df9e7ebbb 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -1,4 +1,3 @@ -import array import collections import dataclasses import decimal @@ -12,7 +11,6 @@ from typing import ( Annotated, Any, - Callable, Dict, Generic, Hashable, @@ -76,18 +74,18 @@ class StandardTypesModel(BaseModel): set_field: set frozenset_field: frozenset deque_field: collections.deque - array_field: array.array + # array_field: array.array # Mappings dict_field: dict - defaultdict_field: collections.defaultdict + # defaultdict_field: collections.defaultdict counter_field: collections.Counter # Other Types pattern_field: Pattern hashable_field: Hashable any_field: Any - callable_field: Callable + # callable_field: Callable def _check_instance(self) -> None: # Boolean checks @@ -130,14 +128,14 @@ def _check_instance(self) -> None: assert self.frozenset_field == frozenset([1, 2, 3]) assert isinstance(self.deque_field, collections.deque) assert list(self.deque_field) == [1, 2, 3] - assert isinstance(self.array_field, array.array) - assert list(self.array_field) == [1, 2, 3] + # assert isinstance(self.array_field, array.array) + # assert list(self.array_field) == [1, 2, 3] # Mapping checks assert isinstance(self.dict_field, dict) assert self.dict_field == {"a": 1, "b": 2} - assert isinstance(self.defaultdict_field, collections.defaultdict) - assert dict(self.defaultdict_field) == {"a": 1, "b": 2} + # assert isinstance(self.defaultdict_field, collections.defaultdict) + # assert dict(self.defaultdict_field) == {"a": 1, "b": 2} assert isinstance(self.counter_field, collections.Counter) assert dict(self.counter_field) == {"a": 1, "b": 2} @@ -147,7 +145,7 @@ def _check_instance(self) -> None: assert isinstance(self.hashable_field, Hashable) assert self.hashable_field == "test" assert self.any_field == "anything goes" - assert callable(self.callable_field) + # assert callable(self.callable_field) def make_standard_types_object() -> StandardTypesModel: @@ -174,16 +172,16 @@ def make_standard_types_object() -> StandardTypesModel: set_field={1, 2, 3}, frozenset_field=frozenset([1, 2, 3]), deque_field=collections.deque([1, 2, 3]), - array_field=array.array("i", [1, 2, 3]), + # array_field=array.array("i", [1, 2, 3]), # Mappings dict_field={"a": 1, "b": 2}, - defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), + # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), counter_field=collections.Counter({"a": 1, "b": 2}), # Other Types 
pattern_field=re.compile(r"\d+"), hashable_field="test", any_field="anything goes", - callable_field=lambda x: x, + # callable_field=lambda x: x, ) From aa4ad22caabead667fd29ebfbcdc164ec3ef91b6 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 09:39:17 -0500 Subject: [PATCH 45/96] Expand --- tests/contrib/test_pydantic.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index df9e7ebbb..afa2cf663 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -49,6 +49,8 @@ class NumberEnum(IntEnum): class StandardTypesModel(BaseModel): # Boolean bool_field: bool + bool_field_int: bool + bool_field_str: bool # Numbers int_field: int @@ -91,6 +93,10 @@ def _check_instance(self) -> None: # Boolean checks assert isinstance(self.bool_field, bool) assert self.bool_field is True + assert isinstance(self.bool_field_int, bool) + assert self.bool_field_int is True + assert isinstance(self.bool_field_str, bool) + assert self.bool_field_str is True # Number checks assert isinstance(self.int_field, int) @@ -152,6 +158,8 @@ def make_standard_types_object() -> StandardTypesModel: return StandardTypesModel( # Boolean bool_field=True, + bool_field_int=1, # type: ignore + bool_field_str="true", # type: ignore # Numbers int_field=42, float_field=3.14, @@ -484,6 +492,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: HeterogeneousPydanticModels = Union[ + StandardTypesModel, ComplexTypesModel, SpecialTypesModel, ParentModel, From 6b1bfd14b4d17f5318eaeecf11925036721ff596 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 10:30:00 -0500 Subject: [PATCH 46/96] Test special instead of standard model --- tests/contrib/test_pydantic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index afa2cf663..e1c32db0e 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -505,7 +505,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ] -HomogeneousPydanticModels = StandardTypesModel +HomogeneousPydanticModels = SpecialTypesModel def _assert_datetime_validity(dt: datetime): @@ -524,7 +524,7 @@ def _assert_timedelta_validity(td: timedelta): def make_homogeneous_list_of_pydantic_objects() -> List[HomogeneousPydanticModels]: - objects = [make_standard_types_object()] + objects = [make_special_types_object()] for o in objects: o._check_instance() return objects From 7534a7d4c121dd22c091cd363f2acc68cc1cb677 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 09:57:22 -0500 Subject: [PATCH 47/96] datetime variants --- tests/contrib/test_pydantic.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index e1c32db0e..5de03d302 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -4,7 +4,7 @@ import fractions import re import uuid -from datetime import date, datetime, timedelta +from datetime import date, datetime, timedelta, timezone from enum import Enum, IntEnum from ipaddress import IPv4Address from pathlib import Path @@ -229,6 +229,10 @@ def make_complex_types_object() -> ComplexTypesModel: class SpecialTypesModel(BaseModel): datetime_field: datetime + datetime_field_int: datetime + datetime_field_float: datetime + datetime_field_str_formatted: datetime + datetime_field_str_int: datetime date_field: date 
timedelta_field: timedelta path_field: Path @@ -236,13 +240,23 @@ class SpecialTypesModel(BaseModel): ip_field: IPv4Address def _check_instance(self) -> None: + dt = datetime(2000, 1, 2, 3, 4, 5) + dtz = datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) assert isinstance(self.datetime_field, datetime) + assert isinstance(self.datetime_field_int, datetime) + assert isinstance(self.datetime_field_float, datetime) + assert isinstance(self.datetime_field_str_formatted, datetime) + assert isinstance(self.datetime_field_str_int, datetime) assert isinstance(self.date_field, date) assert isinstance(self.timedelta_field, timedelta) assert isinstance(self.path_field, Path) assert isinstance(self.uuid_field, uuid.UUID) assert isinstance(self.ip_field, IPv4Address) - assert self.datetime_field == datetime(2000, 1, 2, 3, 4, 5) + assert self.datetime_field == dt + assert self.datetime_field_int == dtz + assert self.datetime_field_float == dtz + assert self.datetime_field_str_formatted == dtz + assert self.datetime_field_str_int == dtz assert self.date_field == date(2000, 1, 2) assert self.timedelta_field == timedelta(days=1, hours=2) assert self.path_field == Path("test/path") @@ -253,6 +267,11 @@ def _check_instance(self) -> None: def make_special_types_object() -> SpecialTypesModel: return SpecialTypesModel( datetime_field=datetime(2000, 1, 2, 3, 4, 5), + # 946800245 + datetime_field_int=946782245, # type: ignore + datetime_field_float=946782245.0, # type: ignore + datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore + datetime_field_str_int="946782245", # type: ignore date_field=date(2000, 1, 2), timedelta_field=timedelta(days=1, hours=2), path_field=Path("test/path"), From 2f1e17e0c47b2316d24e7f1e728f7b6ac66b1868 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 10:25:44 -0500 Subject: [PATCH 48/96] Add date datetime --- tests/contrib/test_pydantic.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 5de03d302..7c4e66e26 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -233,6 +233,7 @@ class SpecialTypesModel(BaseModel): datetime_field_float: datetime datetime_field_str_formatted: datetime datetime_field_str_int: datetime + datetime_field_date: datetime date_field: date timedelta_field: timedelta path_field: Path @@ -247,7 +248,7 @@ def _check_instance(self) -> None: assert isinstance(self.datetime_field_float, datetime) assert isinstance(self.datetime_field_str_formatted, datetime) assert isinstance(self.datetime_field_str_int, datetime) - assert isinstance(self.date_field, date) + assert isinstance(self.datetime_field_date, datetime) assert isinstance(self.timedelta_field, timedelta) assert isinstance(self.path_field, Path) assert isinstance(self.uuid_field, uuid.UUID) @@ -257,6 +258,7 @@ def _check_instance(self) -> None: assert self.datetime_field_float == dtz assert self.datetime_field_str_formatted == dtz assert self.datetime_field_str_int == dtz + assert self.datetime_field_date == datetime(2000, 1, 2) assert self.date_field == date(2000, 1, 2) assert self.timedelta_field == timedelta(days=1, hours=2) assert self.path_field == Path("test/path") @@ -272,6 +274,7 @@ def make_special_types_object() -> SpecialTypesModel: datetime_field_float=946782245.0, # type: ignore datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore datetime_field_str_int="946782245", # type: ignore + datetime_field_date=datetime(2000, 1, 2), 
date_field=date(2000, 1, 2), timedelta_field=timedelta(days=1, hours=2), path_field=Path("test/path"), From 747454bfaf3466793825d12c7eed0ec13592222c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 10:29:50 -0500 Subject: [PATCH 49/96] Time fields --- tests/contrib/test_pydantic.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 7c4e66e26..ee21eadaa 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -4,7 +4,7 @@ import fractions import re import uuid -from datetime import date, datetime, timedelta, timezone +from datetime import date, datetime, time, timedelta, timezone from enum import Enum, IntEnum from ipaddress import IPv4Address from pathlib import Path @@ -234,6 +234,10 @@ class SpecialTypesModel(BaseModel): datetime_field_str_formatted: datetime datetime_field_str_int: datetime datetime_field_date: datetime + + time_field: time + time_field_str: time + date_field: date timedelta_field: timedelta path_field: Path @@ -259,6 +263,8 @@ def _check_instance(self) -> None: assert self.datetime_field_str_formatted == dtz assert self.datetime_field_str_int == dtz assert self.datetime_field_date == datetime(2000, 1, 2) + assert self.time_field == time(3, 4, 5) + assert self.time_field_str == time(3, 4, 5, tzinfo=timezone.utc) assert self.date_field == date(2000, 1, 2) assert self.timedelta_field == timedelta(days=1, hours=2) assert self.path_field == Path("test/path") @@ -275,6 +281,8 @@ def make_special_types_object() -> SpecialTypesModel: datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore datetime_field_str_int="946782245", # type: ignore datetime_field_date=datetime(2000, 1, 2), + time_field=time(3, 4, 5), + time_field_str="03:04:05Z", # type: ignore date_field=date(2000, 1, 2), timedelta_field=timedelta(days=1, hours=2), path_field=Path("test/path"), From f94acf0e5c11035a20eddc8567209514356424ce Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 10:44:35 -0500 Subject: [PATCH 50/96] Revert "Test special instead of standard model" This reverts commit f322bfb1b44efeae7d98b022f5293798d5ad4e8c. 
--- tests/contrib/test_pydantic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index ee21eadaa..8f248fb88 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -535,7 +535,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ] -HomogeneousPydanticModels = SpecialTypesModel +HomogeneousPydanticModels = StandardTypesModel def _assert_datetime_validity(dt: datetime): @@ -554,7 +554,7 @@ def _assert_timedelta_validity(td: timedelta): def make_homogeneous_list_of_pydantic_objects() -> List[HomogeneousPydanticModels]: - objects = [make_special_types_object()] + objects = [make_standard_types_object()] for o in objects: o._check_instance() return objects From 3959a837cd695da9702a33715eac7572f1c2b8ff Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 10:49:36 -0500 Subject: [PATCH 51/96] namedtuple --- tests/contrib/test_pydantic.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 8f248fb88..d2e61a6e2 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -15,6 +15,7 @@ Generic, Hashable, List, + NamedTuple, Optional, Pattern, Sequence, @@ -193,6 +194,11 @@ def make_standard_types_object() -> StandardTypesModel: ) +class Point(NamedTuple): + x: int + y: int + + class ComplexTypesModel(BaseModel): list_field: List[str] dict_field: Dict[str, int] @@ -200,6 +206,7 @@ class ComplexTypesModel(BaseModel): tuple_field: Tuple[str, int] union_field: Union[str, int] optional_field: Optional[str] + named_tuple_field: Point def _check_instance(self) -> None: assert isinstance(self.list_field, list) @@ -214,6 +221,7 @@ def _check_instance(self) -> None: assert self.tuple_field == ("hello", 42) assert self.union_field == "string_or_int" assert self.optional_field == "present" + assert self.named_tuple_field == Point(x=1, y=2) def make_complex_types_object() -> ComplexTypesModel: @@ -224,6 +232,7 @@ def make_complex_types_object() -> ComplexTypesModel: tuple_field=("hello", 42), union_field="string_or_int", optional_field="present", + named_tuple_field=Point(x=1, y=2), ) From 9ec4c34157935b1d190db2adc9fecd3d2847965e Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 11:21:28 -0500 Subject: [PATCH 52/96] sequence field --- tests/contrib/test_pydantic.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index d2e61a6e2..23d569ead 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -77,6 +77,7 @@ class StandardTypesModel(BaseModel): set_field: set frozenset_field: frozenset deque_field: collections.deque + sequence_field: Sequence[int] # array_field: array.array # Mappings @@ -135,6 +136,8 @@ def _check_instance(self) -> None: assert self.frozenset_field == frozenset([1, 2, 3]) assert isinstance(self.deque_field, collections.deque) assert list(self.deque_field) == [1, 2, 3] + assert isinstance(self.sequence_field, tuple) + assert list(self.sequence_field) == [1, 2, 3] # assert isinstance(self.array_field, array.array) # assert list(self.array_field) == [1, 2, 3] @@ -181,6 +184,7 @@ def make_standard_types_object() -> StandardTypesModel: set_field={1, 2, 3}, frozenset_field=frozenset([1, 2, 3]), deque_field=collections.deque([1, 2, 3]), + sequence_field=(1, 2, 3), # array_field=array.array("i", [1, 2, 3]), # Mappings dict_field={"a": 1, "b": 2}, From 
8508be578b5cf30538a5841dafde9fafcee2c48b Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 11:21:54 -0500 Subject: [PATCH 53/96] iterable field --- tests/contrib/test_pydantic.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 23d569ead..473535965 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -78,7 +78,7 @@ class StandardTypesModel(BaseModel): frozenset_field: frozenset deque_field: collections.deque sequence_field: Sequence[int] - # array_field: array.array + # Iterable[int] supported but not tested since original vs round-tripped do not compare equal # Mappings dict_field: dict @@ -136,10 +136,8 @@ def _check_instance(self) -> None: assert self.frozenset_field == frozenset([1, 2, 3]) assert isinstance(self.deque_field, collections.deque) assert list(self.deque_field) == [1, 2, 3] - assert isinstance(self.sequence_field, tuple) + assert isinstance(self.sequence_field, list) assert list(self.sequence_field) == [1, 2, 3] - # assert isinstance(self.array_field, array.array) - # assert list(self.array_field) == [1, 2, 3] # Mapping checks assert isinstance(self.dict_field, dict) @@ -184,8 +182,8 @@ def make_standard_types_object() -> StandardTypesModel: set_field={1, 2, 3}, frozenset_field=frozenset([1, 2, 3]), deque_field=collections.deque([1, 2, 3]), - sequence_field=(1, 2, 3), - # array_field=array.array("i", [1, 2, 3]), + # other sequence types are converted to list, as documented + sequence_field=[1, 2, 3], # Mappings dict_field={"a": 1, "b": 2}, # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), From 3df3c92d73ec1260a94160032e317e7b9aefe674 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 11:39:47 -0500 Subject: [PATCH 54/96] TypedDict --- tests/contrib/test_pydantic.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 473535965..ec7443a1e 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -27,6 +27,7 @@ from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema +from typing_extensions import TypedDict from temporalio import activity, workflow from temporalio.client import Client @@ -47,6 +48,11 @@ class NumberEnum(IntEnum): two = 2 +class UserTypedDict(TypedDict): + name: str + id: int + + class StandardTypesModel(BaseModel): # Boolean bool_field: bool @@ -84,6 +90,7 @@ class StandardTypesModel(BaseModel): dict_field: dict # defaultdict_field: collections.defaultdict counter_field: collections.Counter + typed_dict_field: UserTypedDict # Other Types pattern_field: Pattern @@ -146,6 +153,8 @@ def _check_instance(self) -> None: # assert dict(self.defaultdict_field) == {"a": 1, "b": 2} assert isinstance(self.counter_field, collections.Counter) assert dict(self.counter_field) == {"a": 1, "b": 2} + assert isinstance(self.typed_dict_field, dict) + assert self.typed_dict_field == {"name": "username", "id": 7} # Other type checks assert isinstance(self.pattern_field, Pattern) @@ -188,6 +197,7 @@ def make_standard_types_object() -> StandardTypesModel: dict_field={"a": 1, "b": 2}, # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), counter_field=collections.Counter({"a": 1, "b": 2}), + typed_dict_field={"name": "username", "id": 7}, # Other Types pattern_field=re.compile(r"\d+"), hashable_field="test", From 15ba77d5c52eb0195a57c70cf6f62a8f91bfd581 Mon Sep 17 
00:00:00 2001 From: Dan Davison Date: Fri, 7 Feb 2025 16:42:51 -0500 Subject: [PATCH 55/96] Expand tests --- tests/contrib/test_pydantic.py | 42 ++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index ec7443a1e..a5920da7a 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -53,6 +53,18 @@ class UserTypedDict(TypedDict): id: int +class TypedDictModel(BaseModel): + typed_dict_field: UserTypedDict + + def _check_instance(self) -> None: + assert isinstance(self.typed_dict_field, dict) + assert self.typed_dict_field == {"name": "username", "id": 7} + + +def make_typed_dict_object() -> TypedDictModel: + return TypedDictModel(typed_dict_field={"name": "username", "id": 7}) + + class StandardTypesModel(BaseModel): # Boolean bool_field: bool @@ -632,6 +644,7 @@ class HeterogeneousListOfPydanticObjectsWorkflow: async def run( self, models: List[HeterogeneousPydanticModels] ) -> List[HeterogeneousPydanticModels]: + # TODO: test instantiation of models return await workflow.execute_activity( heterogeneous_list_of_pydantic_models_activity, models, @@ -639,6 +652,35 @@ async def run( ) +@workflow.defn +class InstantiationInSandboxWorkflow: + @workflow.run + async def run(self) -> None: + make_heterogeneous_list_of_pydantic_objects() + + +async def test_instantiation_outside_sandbox(): + make_heterogeneous_list_of_pydantic_objects() + + +async def test_instantiation_inside_sandbox(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[InstantiationInSandboxWorkflow], + ): + await client.execute_workflow( + InstantiationInSandboxWorkflow.run, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + + async def test_homogeneous_list_of_pydantic_objects(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter From 9fc5d778f9ee4be7add8bb09c2d3b264ac88ae58 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 08:05:21 -0500 Subject: [PATCH 56/96] Always pass through typing_extensions Tests of Pydantic TypedDict fields break without this (pytest assertion rewriter) --- temporalio/worker/workflow_sandbox/_restrictions.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 6b23c451e..85049a5ea 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -444,9 +444,10 @@ def with_child_unrestricted(self, *child_path: str) -> SandboxMatcher: # Due to a metaclass conflict in sandbox, we need zipfile module to pass # through always "zipfile", - # This is a very general module needed by many things including pytest's + # Very general modules needed by many things including pytest's # assertion rewriter "typing", + "typing_extensions", # Required due to https://github.com/protocolbuffers/protobuf/issues/10143 # for older versions. This unfortunately means that on those versions, # everyone using Python protos has to pass their module through. 
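The restrictions change above adds typing and typing_extensions to the sandbox's unconditional pass-through list because the sandbox otherwise re-imports those modules for every workflow run, which is what breaks Pydantic TypedDict fields under pytest's assertion rewriter. Modules that are not on the built-in list can still be passed through per worker. The following is a minimal sketch, not part of this patch series: it assumes the SandboxRestrictions.default.with_passthrough_modules helper and the worker's workflow_runner argument from current SDK releases, and the new_sandbox_runner name is purely illustrative.

    from temporalio.worker.workflow_sandbox import (
        SandboxedWorkflowRunner,
        SandboxRestrictions,
    )


    def new_sandbox_runner() -> SandboxedWorkflowRunner:
        # Ask the sandbox to reuse the already-imported module instead of
        # re-importing (and proxying) it for every workflow run.
        return SandboxedWorkflowRunner(
            restrictions=SandboxRestrictions.default.with_passthrough_modules("pydantic")
        )

A worker would then be constructed with workflow_runner=new_sandbox_runner() alongside its workflows and activities, so pass-through stays an opt-in, per-worker choice rather than a change to the SDK defaults.
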
From 5c6e44c92c43b3d51c5d7186954f67d5cda62a68 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 08:23:12 -0500 Subject: [PATCH 57/96] Make activities generic --- tests/contrib/test_pydantic.py | 29 ++++++++++------------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index a5920da7a..218eea231 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -37,6 +37,9 @@ SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) ShortSequence = Annotated[SequenceType, Len(max_length=2)] +M = TypeVar("M", bound=BaseModel) +T = TypeVar("T") + class FruitEnum(str, Enum): apple = "apple" @@ -400,9 +403,6 @@ def make_annotated_fields_object() -> AnnotatedFieldsModel: ) -T = TypeVar("T") - - class GenericModel(BaseModel, Generic[T]): value: T values: List[T] @@ -612,16 +612,7 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[HeterogeneousPydanticM @activity.defn -async def homogeneous_list_of_pydantic_models_activity( - models: List[HomogeneousPydanticModels], -) -> List[HomogeneousPydanticModels]: - return models - - -@activity.defn -async def heterogeneous_list_of_pydantic_models_activity( - models: List[HeterogeneousPydanticModels], -) -> List[HeterogeneousPydanticModels]: +async def pydantic_objects_activity(models: List[M]) -> List[M]: return models @@ -632,7 +623,7 @@ async def run( self, models: List[HomogeneousPydanticModels] ) -> List[HomogeneousPydanticModels]: return await workflow.execute_activity( - homogeneous_list_of_pydantic_models_activity, + pydantic_objects_activity, models, start_to_close_timeout=timedelta(minutes=1), ) @@ -646,7 +637,7 @@ async def run( ) -> List[HeterogeneousPydanticModels]: # TODO: test instantiation of models return await workflow.execute_activity( - heterogeneous_list_of_pydantic_models_activity, + pydantic_objects_activity, models, start_to_close_timeout=timedelta(minutes=1), ) @@ -693,7 +684,7 @@ async def test_homogeneous_list_of_pydantic_objects(client: Client): client, task_queue=task_queue_name, workflows=[HomogeneousListOfPydanticObjectsWorkflow], - activities=[homogeneous_list_of_pydantic_models_activity], + activities=[pydantic_objects_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( HomogeneousListOfPydanticObjectsWorkflow.run, @@ -718,7 +709,7 @@ async def test_heterogeneous_list_of_pydantic_objects(client: Client): client, task_queue=task_queue_name, workflows=[HeterogeneousListOfPydanticObjectsWorkflow], - activities=[heterogeneous_list_of_pydantic_models_activity], + activities=[pydantic_objects_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( HeterogeneousListOfPydanticObjectsWorkflow.run, @@ -755,7 +746,7 @@ async def run( ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( - heterogeneous_list_of_pydantic_models_activity, + pydantic_objects_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -775,7 +766,7 @@ async def test_mixed_collection_types(client: Client): client, task_queue=task_queue_name, workflows=[MixedCollectionTypesWorkflow], - activities=[heterogeneous_list_of_pydantic_models_activity], + activities=[pydantic_objects_activity], ): ( round_tripped_dataclass_objects, From 1b2b7311afd001ec62386c62d0a71e9bdbd5619f Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 09:54:54 -0500 Subject: [PATCH 58/96] Revert "Make activities generic" This reverts commit 
d414ff958611a9c2a7421584e20360f52d579171. --- tests/contrib/test_pydantic.py | 29 +++++++++++++++++++---------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 218eea231..a5920da7a 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -37,9 +37,6 @@ SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) ShortSequence = Annotated[SequenceType, Len(max_length=2)] -M = TypeVar("M", bound=BaseModel) -T = TypeVar("T") - class FruitEnum(str, Enum): apple = "apple" @@ -403,6 +400,9 @@ def make_annotated_fields_object() -> AnnotatedFieldsModel: ) +T = TypeVar("T") + + class GenericModel(BaseModel, Generic[T]): value: T values: List[T] @@ -612,7 +612,16 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[HeterogeneousPydanticM @activity.defn -async def pydantic_objects_activity(models: List[M]) -> List[M]: +async def homogeneous_list_of_pydantic_models_activity( + models: List[HomogeneousPydanticModels], +) -> List[HomogeneousPydanticModels]: + return models + + +@activity.defn +async def heterogeneous_list_of_pydantic_models_activity( + models: List[HeterogeneousPydanticModels], +) -> List[HeterogeneousPydanticModels]: return models @@ -623,7 +632,7 @@ async def run( self, models: List[HomogeneousPydanticModels] ) -> List[HomogeneousPydanticModels]: return await workflow.execute_activity( - pydantic_objects_activity, + homogeneous_list_of_pydantic_models_activity, models, start_to_close_timeout=timedelta(minutes=1), ) @@ -637,7 +646,7 @@ async def run( ) -> List[HeterogeneousPydanticModels]: # TODO: test instantiation of models return await workflow.execute_activity( - pydantic_objects_activity, + heterogeneous_list_of_pydantic_models_activity, models, start_to_close_timeout=timedelta(minutes=1), ) @@ -684,7 +693,7 @@ async def test_homogeneous_list_of_pydantic_objects(client: Client): client, task_queue=task_queue_name, workflows=[HomogeneousListOfPydanticObjectsWorkflow], - activities=[pydantic_objects_activity], + activities=[homogeneous_list_of_pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( HomogeneousListOfPydanticObjectsWorkflow.run, @@ -709,7 +718,7 @@ async def test_heterogeneous_list_of_pydantic_objects(client: Client): client, task_queue=task_queue_name, workflows=[HeterogeneousListOfPydanticObjectsWorkflow], - activities=[pydantic_objects_activity], + activities=[heterogeneous_list_of_pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( HeterogeneousListOfPydanticObjectsWorkflow.run, @@ -746,7 +755,7 @@ async def run( ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( - pydantic_objects_activity, + heterogeneous_list_of_pydantic_models_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -766,7 +775,7 @@ async def test_mixed_collection_types(client: Client): client, task_queue=task_queue_name, workflows=[MixedCollectionTypesWorkflow], - activities=[pydantic_objects_activity], + activities=[heterogeneous_list_of_pydantic_models_activity], ): ( round_tripped_dataclass_objects, From 634fe807ba0671f1927cf8566ec85d1c7405e31a Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 09:57:44 -0500 Subject: [PATCH 59/96] Reduce tests --- tests/contrib/test_pydantic.py | 101 +++++++-------------------------- 1 file changed, 22 insertions(+), 79 deletions(-) diff --git a/tests/contrib/test_pydantic.py 
b/tests/contrib/test_pydantic.py index a5920da7a..38f840413 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -554,7 +554,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ) -HeterogeneousPydanticModels = Union[ +PydanticModels = Union[ StandardTypesModel, ComplexTypesModel, SpecialTypesModel, @@ -568,9 +568,6 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ] -HomogeneousPydanticModels = StandardTypesModel - - def _assert_datetime_validity(dt: datetime): assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) @@ -586,14 +583,7 @@ def _assert_timedelta_validity(td: timedelta): assert issubclass(td.__class__, timedelta) -def make_homogeneous_list_of_pydantic_objects() -> List[HomogeneousPydanticModels]: - objects = [make_standard_types_object()] - for o in objects: - o._check_instance() - return objects - - -def make_heterogeneous_list_of_pydantic_objects() -> List[HeterogeneousPydanticModels]: +def make_list_of_pydantic_objects() -> List[PydanticModels]: objects = [ make_standard_types_object(), make_complex_types_object(), @@ -612,42 +602,20 @@ def make_heterogeneous_list_of_pydantic_objects() -> List[HeterogeneousPydanticM @activity.defn -async def homogeneous_list_of_pydantic_models_activity( - models: List[HomogeneousPydanticModels], -) -> List[HomogeneousPydanticModels]: - return models - - -@activity.defn -async def heterogeneous_list_of_pydantic_models_activity( - models: List[HeterogeneousPydanticModels], -) -> List[HeterogeneousPydanticModels]: +async def pydantic_models_activity( + models: List[PydanticModels], +) -> List[PydanticModels]: return models @workflow.defn -class HomogeneousListOfPydanticObjectsWorkflow: +class PydanticObjectsWorkflow: @workflow.run - async def run( - self, models: List[HomogeneousPydanticModels] - ) -> List[HomogeneousPydanticModels]: - return await workflow.execute_activity( - homogeneous_list_of_pydantic_models_activity, - models, - start_to_close_timeout=timedelta(minutes=1), - ) - - -@workflow.defn -class HeterogeneousListOfPydanticObjectsWorkflow: - @workflow.run - async def run( - self, models: List[HeterogeneousPydanticModels] - ) -> List[HeterogeneousPydanticModels]: + async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: # TODO: test instantiation of models return await workflow.execute_activity( - heterogeneous_list_of_pydantic_models_activity, - models, + pydantic_models_activity, + objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -656,11 +624,11 @@ async def run( class InstantiationInSandboxWorkflow: @workflow.run async def run(self) -> None: - make_heterogeneous_list_of_pydantic_objects() + make_list_of_pydantic_objects() async def test_instantiation_outside_sandbox(): - make_heterogeneous_list_of_pydantic_objects() + make_list_of_pydantic_objects() async def test_instantiation_inside_sandbox(client: Client): @@ -681,47 +649,22 @@ async def test_instantiation_inside_sandbox(client: Client): ) -async def test_homogeneous_list_of_pydantic_objects(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_pydantic_objects = make_homogeneous_list_of_pydantic_objects() - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[HomogeneousListOfPydanticObjectsWorkflow], - activities=[homogeneous_list_of_pydantic_models_activity], - ): - round_tripped_pydantic_objects = await 
client.execute_workflow( - HomogeneousListOfPydanticObjectsWorkflow.run, - orig_pydantic_objects, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - assert orig_pydantic_objects == round_tripped_pydantic_objects - for o in round_tripped_pydantic_objects: - o._check_instance() - - -async def test_heterogeneous_list_of_pydantic_objects(client: Client): +async def test_round_trip_pydantic_objects(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_pydantic_objects = make_heterogeneous_list_of_pydantic_objects() + orig_pydantic_objects = make_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, - workflows=[HeterogeneousListOfPydanticObjectsWorkflow], - activities=[heterogeneous_list_of_pydantic_models_activity], + workflows=[PydanticObjectsWorkflow], + activities=[pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( - HeterogeneousListOfPydanticObjectsWorkflow.run, + PydanticObjectsWorkflow.run, orig_pydantic_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, @@ -747,15 +690,15 @@ async def run( self, input: Tuple[ List[MyDataClass], - List[HeterogeneousPydanticModels], + List[PydanticModels], ], ) -> Tuple[ List[MyDataClass], - List[HeterogeneousPydanticModels], + List[PydanticModels], ]: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( - heterogeneous_list_of_pydantic_models_activity, + pydantic_models_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -769,13 +712,13 @@ async def test_mixed_collection_types(client: Client): task_queue_name = str(uuid.uuid4()) orig_dataclass_objects = make_dataclass_objects() - orig_pydantic_objects = make_heterogeneous_list_of_pydantic_objects() + orig_pydantic_objects = make_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, workflows=[MixedCollectionTypesWorkflow], - activities=[heterogeneous_list_of_pydantic_models_activity], + activities=[pydantic_models_activity], ): ( round_tripped_dataclass_objects, @@ -796,7 +739,7 @@ async def test_mixed_collection_types(client: Client): class PydanticModelUsageWorkflow: @workflow.run async def run(self) -> None: - for o in make_heterogeneous_list_of_pydantic_objects(): + for o in make_list_of_pydantic_objects(): o._check_instance() From 899b92c30280733275aac0e1a147702effb4d4cd Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 10:19:30 -0500 Subject: [PATCH 60/96] Expand tests --- tests/contrib/test_pydantic.py | 67 +++++++++++++++++++++++++++++----- 1 file changed, 57 insertions(+), 10 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 38f840413..50f6c80f7 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -26,7 +26,7 @@ ) from annotated_types import Len -from pydantic import BaseModel, Field, WithJsonSchema +from pydantic import BaseModel, Field, WithJsonSchema, create_model from typing_extensions import TypedDict from temporalio import activity, workflow @@ -609,10 +609,16 @@ async def pydantic_models_activity( @workflow.defn -class PydanticObjectsWorkflow: +class InstantiateModelsWorkflow: + @workflow.run + async def run(self) -> None: + make_list_of_pydantic_objects() + + +@workflow.defn +class RoundTripObjectsWorkflow: @workflow.run async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: - # 
TODO: test instantiation of models return await workflow.execute_activity( pydantic_models_activity, objects, @@ -620,11 +626,24 @@ async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: ) +def clone_objects(objects: List[PydanticModels]) -> List[PydanticModels]: + new_objects = [] + for o in objects: + fields = {} + for name, f in o.model_fields.items(): + fields[name] = (f.annotation, f) + model = create_model(o.__class__.__name__, **fields) + new_objects.append(model(**o.model_dump(by_alias=True))) + for old, new in zip(objects, new_objects): + assert old.model_dump() == new.model_dump() + return new_objects + + @workflow.defn -class InstantiationInSandboxWorkflow: +class CloneObjectsWorkflow: @workflow.run - async def run(self) -> None: - make_list_of_pydantic_objects() + async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: + return clone_objects(objects) async def test_instantiation_outside_sandbox(): @@ -640,10 +659,10 @@ async def test_instantiation_inside_sandbox(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[InstantiationInSandboxWorkflow], + workflows=[InstantiateModelsWorkflow], ): await client.execute_workflow( - InstantiationInSandboxWorkflow.run, + InstantiateModelsWorkflow.run, id=str(uuid.uuid4()), task_queue=task_queue_name, ) @@ -660,11 +679,11 @@ async def test_round_trip_pydantic_objects(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[PydanticObjectsWorkflow], + workflows=[RoundTripObjectsWorkflow], activities=[pydantic_models_activity], ): round_tripped_pydantic_objects = await client.execute_workflow( - PydanticObjectsWorkflow.run, + RoundTripObjectsWorkflow.run, orig_pydantic_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, @@ -674,6 +693,34 @@ async def test_round_trip_pydantic_objects(client: Client): o._check_instance() +async def test_clone_objects_outside_sandbox(): + clone_objects(make_list_of_pydantic_objects()) + + +async def test_clone_objects_in_sandbox(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_pydantic_objects = make_list_of_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[CloneObjectsWorkflow], + ): + round_tripped_pydantic_objects = await client.execute_workflow( + CloneObjectsWorkflow.run, + orig_pydantic_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert round_tripped_pydantic_objects == orig_pydantic_objects + for o in round_tripped_pydantic_objects: + o._check_instance() + + @dataclasses.dataclass class MyDataClass: int_field: int From be5f5697103dd37ad3ab1a611fb96aa039980c06 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 13:12:34 -0500 Subject: [PATCH 61/96] Test union fields --- tests/contrib/test_pydantic.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 50f6c80f7..3a264115c 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -422,6 +422,24 @@ def make_generic_string_object() -> GenericModel[str]: ) +class UnionModel(BaseModel): + simple_union_field: Union[str, int] + proxied_union_field: Union[datetime, Path] + + def _check_instance(self) -> None: + assert isinstance(self.simple_union_field, str) + assert self.simple_union_field == "string_or_int" + assert 
isinstance(self.proxied_union_field, Path) + assert self.proxied_union_field == Path("test/path") + + +def make_union_object() -> UnionModel: + return UnionModel( + simple_union_field="string_or_int", + proxied_union_field=Path("test/path"), + ) + + class PydanticDatetimeModel(BaseModel): datetime_field: datetime datetime_field_assigned_field: datetime = Field() @@ -562,6 +580,7 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: FieldFeaturesModel, AnnotatedFieldsModel, GenericModel[Any], + UnionModel, PydanticDatetimeModel, PydanticDateModel, PydanticTimedeltaModel, @@ -592,6 +611,7 @@ def make_list_of_pydantic_objects() -> List[PydanticModels]: make_field_features_object(), make_annotated_fields_object(), make_generic_string_object(), + make_union_object(), make_pydantic_datetime_object(), make_pydantic_date_object(), make_pydantic_timedelta_object(), From 61e75affc4edea47ea923c88ef7c9d60605de03c Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 13:15:24 -0500 Subject: [PATCH 62/96] Complex custom type --- tests/contrib/test_pydantic.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 3a264115c..873e0d6a7 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -750,19 +750,16 @@ def make_dataclass_objects() -> List[MyDataClass]: return [MyDataClass(int_field=7)] +ComplexCustomType = Tuple[List[MyDataClass], List[PydanticModels]] + + @workflow.defn -class MixedCollectionTypesWorkflow: +class ComplexCustomTypeWorkflow: @workflow.run async def run( self, - input: Tuple[ - List[MyDataClass], - List[PydanticModels], - ], - ) -> Tuple[ - List[MyDataClass], - List[PydanticModels], - ]: + input: ComplexCustomType, + ) -> ComplexCustomType: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( pydantic_models_activity, @@ -772,7 +769,7 @@ async def run( return data_classes, pydantic_objects -async def test_mixed_collection_types(client: Client): +async def test_complex_custom_type(client: Client): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) @@ -784,14 +781,14 @@ async def test_mixed_collection_types(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[MixedCollectionTypesWorkflow], + workflows=[ComplexCustomTypeWorkflow], activities=[pydantic_models_activity], ): ( round_tripped_dataclass_objects, round_tripped_pydantic_objects, ) = await client.execute_workflow( - MixedCollectionTypesWorkflow.run, + ComplexCustomTypeWorkflow.run, (orig_dataclass_objects, orig_pydantic_objects), id=str(uuid.uuid4()), task_queue=task_queue_name, From 02d5d9884cb70c521d556eabedbc573750b12d29 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sat, 8 Feb 2025 13:45:19 -0500 Subject: [PATCH 63/96] Test complex union --- tests/contrib/test_pydantic.py | 74 ++++++++++++++++++++++++++++++++-- 1 file changed, 71 insertions(+), 3 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 873e0d6a7..f44f41595 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -741,13 +741,14 @@ async def test_clone_objects_in_sandbox(client: Client): o._check_instance() -@dataclasses.dataclass +@dataclasses.dataclass(order=True) class MyDataClass: - int_field: int + # The name int_field also occurs in StandardTypesModel and currently unions can match them up incorrectly. 
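The comment above motivates the rename: when a dataclass and a Pydantic model in the same union share a field name, a JSON payload can validate against the wrong member. A minimal sketch of that ambiguity, using Pydantic v2's TypeAdapter purely for illustration (the SDK converter resolves unions with its own logic, and the LegacyDataClass/LegacyModel names here are hypothetical):

    from dataclasses import dataclass
    from typing import Union

    from pydantic import BaseModel, TypeAdapter

    @dataclass
    class LegacyDataClass:
        int_field: int

    class LegacyModel(BaseModel):
        int_field: int

    # Both union members accept {"int_field": 7}, so which type comes back
    # depends on union resolution order/mode rather than on intent.
    obj = TypeAdapter(Union[LegacyDataClass, LegacyModel]).validate_python({"int_field": 7})
    print(type(obj))  # may be LegacyDataClass even when a LegacyModel was serialized

Renaming the dataclass field to data_class_int_field removes the overlap, so each JSON shape matches exactly one union member.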
+ data_class_int_field: int def make_dataclass_objects() -> List[MyDataClass]: - return [MyDataClass(int_field=7)] + return [MyDataClass(data_class_int_field=7)] ComplexCustomType = Tuple[List[MyDataClass], List[PydanticModels]] @@ -799,6 +800,73 @@ async def test_complex_custom_type(client: Client): o._check_instance() +ComplexCustomUnionType = List[Union[MyDataClass, PydanticModels]] + + +@workflow.defn +class ComplexCustomUnionTypeWorkflow: + @workflow.run + async def run( + self, + input: ComplexCustomUnionType, + ) -> ComplexCustomUnionType: + data_classes, pydantic_objects = [], [] + for o in input: + if dataclasses.is_dataclass(o): + data_classes.append(o) + elif isinstance(o, BaseModel): + pydantic_objects.append(o) + else: + raise TypeError(f"Unexpected type: {type(o)}") + pydantic_objects = await workflow.execute_activity( + pydantic_models_activity, + pydantic_objects, + start_to_close_timeout=timedelta(minutes=1), + ) + return data_classes + pydantic_objects + + +async def test_complex_custom_union_type(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_dataclass_objects = make_dataclass_objects() + orig_pydantic_objects = make_list_of_pydantic_objects() + orig_objects = orig_dataclass_objects + orig_pydantic_objects + import random + + random.shuffle(orig_objects) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[ComplexCustomUnionTypeWorkflow], + activities=[pydantic_models_activity], + ): + round_tripped_objects = await client.execute_workflow( + ComplexCustomUnionTypeWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + round_tripped_dataclass_objects, round_tripped_pydantic_objects = [], [] + for o in round_tripped_objects: + if isinstance(o, MyDataClass): + round_tripped_dataclass_objects.append(o) + elif isinstance(o, BaseModel): + round_tripped_pydantic_objects.append(o) + else: + raise TypeError(f"Unexpected type: {type(o)}") + assert sorted(orig_dataclass_objects) == sorted(round_tripped_dataclass_objects) + assert sorted(orig_pydantic_objects, key=lambda o: o.__class__.__name__) == sorted( + round_tripped_pydantic_objects, key=lambda o: o.__class__.__name__ + ) + for o in round_tripped_pydantic_objects: + o._check_instance() + + @workflow.defn class PydanticModelUsageWorkflow: @workflow.run From 706f5f25d079e58ef7017fa46f33e7c1b761d4ca Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 9 Feb 2025 10:12:06 -0500 Subject: [PATCH 64/96] Rename --- tests/contrib/test_pydantic.py | 46 +++++++++++++++++----------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index f44f41595..7876977db 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -694,7 +694,7 @@ async def test_round_trip_pydantic_objects(client: Client): client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_pydantic_objects = make_list_of_pydantic_objects() + orig_objects = make_list_of_pydantic_objects() async with Worker( client, @@ -702,14 +702,14 @@ async def test_round_trip_pydantic_objects(client: Client): workflows=[RoundTripObjectsWorkflow], activities=[pydantic_models_activity], ): - round_tripped_pydantic_objects = await client.execute_workflow( + returned_objects = await client.execute_workflow( RoundTripObjectsWorkflow.run, - orig_pydantic_objects, + orig_objects, 
id=str(uuid.uuid4()), task_queue=task_queue_name, ) - assert orig_pydantic_objects == round_tripped_pydantic_objects - for o in round_tripped_pydantic_objects: + assert returned_objects == orig_objects + for o in returned_objects: o._check_instance() @@ -723,21 +723,21 @@ async def test_clone_objects_in_sandbox(client: Client): client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_pydantic_objects = make_list_of_pydantic_objects() + orig_objects = make_list_of_pydantic_objects() async with Worker( client, task_queue=task_queue_name, workflows=[CloneObjectsWorkflow], ): - round_tripped_pydantic_objects = await client.execute_workflow( + returned_objects = await client.execute_workflow( CloneObjectsWorkflow.run, - orig_pydantic_objects, + orig_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, ) - assert round_tripped_pydantic_objects == orig_pydantic_objects - for o in round_tripped_pydantic_objects: + assert returned_objects == orig_objects + for o in returned_objects: o._check_instance() @@ -786,17 +786,17 @@ async def test_complex_custom_type(client: Client): activities=[pydantic_models_activity], ): ( - round_tripped_dataclass_objects, - round_tripped_pydantic_objects, + returned_dataclass_objects, + returned_pydantic_objects, ) = await client.execute_workflow( ComplexCustomTypeWorkflow.run, (orig_dataclass_objects, orig_pydantic_objects), id=str(uuid.uuid4()), task_queue=task_queue_name, ) - assert orig_dataclass_objects == round_tripped_dataclass_objects - assert orig_pydantic_objects == round_tripped_pydantic_objects - for o in round_tripped_pydantic_objects: + assert orig_dataclass_objects == returned_dataclass_objects + assert orig_pydantic_objects == returned_pydantic_objects + for o in returned_pydantic_objects: o._check_instance() @@ -845,25 +845,25 @@ async def test_complex_custom_union_type(client: Client): workflows=[ComplexCustomUnionTypeWorkflow], activities=[pydantic_models_activity], ): - round_tripped_objects = await client.execute_workflow( + returned_objects = await client.execute_workflow( ComplexCustomUnionTypeWorkflow.run, orig_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, ) - round_tripped_dataclass_objects, round_tripped_pydantic_objects = [], [] - for o in round_tripped_objects: + returned_dataclass_objects, returned_pydantic_objects = [], [] + for o in returned_objects: if isinstance(o, MyDataClass): - round_tripped_dataclass_objects.append(o) + returned_dataclass_objects.append(o) elif isinstance(o, BaseModel): - round_tripped_pydantic_objects.append(o) + returned_pydantic_objects.append(o) else: raise TypeError(f"Unexpected type: {type(o)}") - assert sorted(orig_dataclass_objects) == sorted(round_tripped_dataclass_objects) + assert sorted(orig_dataclass_objects) == sorted(returned_dataclass_objects) assert sorted(orig_pydantic_objects, key=lambda o: o.__class__.__name__) == sorted( - round_tripped_pydantic_objects, key=lambda o: o.__class__.__name__ + returned_pydantic_objects, key=lambda o: o.__class__.__name__ ) - for o in round_tripped_pydantic_objects: + for o in returned_pydantic_objects: o._check_instance() From 0ce76b79986d3857f6e5468ac0e82f6b26efdebc Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Sun, 9 Feb 2025 20:44:39 -0500 Subject: [PATCH 65/96] Use non-list input --- tests/contrib/test_pydantic.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index 7876977db..a598c1c9c 100644 --- a/tests/contrib/test_pydantic.py +++ 
b/tests/contrib/test_pydantic.py @@ -198,7 +198,8 @@ def make_standard_types_object() -> StandardTypesModel: str_enum_field=FruitEnum.apple, int_enum_field=NumberEnum.one, # Collections - list_field=[1, 2, 3], + # these cast input to list, tuple, set, etc. + list_field={1, 2, 3}, # type: ignore tuple_field=(1, 2, 3), set_field={1, 2, 3}, frozenset_field=frozenset([1, 2, 3]), From 047059244f9d268ec49e904925b7141aba7e83b5 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 10:29:02 -0500 Subject: [PATCH 66/96] Fix lint --- tests/contrib/test_pydantic.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/test_pydantic.py index a598c1c9c..e5766d0e4 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/test_pydantic.py @@ -653,7 +653,7 @@ def clone_objects(objects: List[PydanticModels]) -> List[PydanticModels]: fields = {} for name, f in o.model_fields.items(): fields[name] = (f.annotation, f) - model = create_model(o.__class__.__name__, **fields) + model = create_model(o.__class__.__name__, **fields) # type: ignore new_objects.append(model(**o.model_dump(by_alias=True))) for old, new in zip(objects, new_objects): assert old.model_dump() == new.model_dump() @@ -811,7 +811,8 @@ async def run( self, input: ComplexCustomUnionType, ) -> ComplexCustomUnionType: - data_classes, pydantic_objects = [], [] + data_classes = [] + pydantic_objects: List[PydanticModels] = [] for o in input: if dataclasses.is_dataclass(o): data_classes.append(o) @@ -824,7 +825,7 @@ async def run( pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) - return data_classes + pydantic_objects + return data_classes + pydantic_objects # type: ignore async def test_complex_custom_union_type(client: Client): From e1080c78e69dd486602858ebc55ed4a4cae404be Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 10:44:57 -0500 Subject: [PATCH 67/96] Reorganize --- tests/contrib/pydantic/activities.py | 11 + .../{test_pydantic.py => pydantic/models.py} | 323 +----------------- tests/contrib/pydantic/test_pydantic.py | 205 +++++++++++ tests/contrib/pydantic/workflows.py | 111 ++++++ 4 files changed, 343 insertions(+), 307 deletions(-) create mode 100644 tests/contrib/pydantic/activities.py rename tests/contrib/{test_pydantic.py => pydantic/models.py} (70%) create mode 100644 tests/contrib/pydantic/test_pydantic.py create mode 100644 tests/contrib/pydantic/workflows.py diff --git a/tests/contrib/pydantic/activities.py b/tests/contrib/pydantic/activities.py new file mode 100644 index 000000000..780203351 --- /dev/null +++ b/tests/contrib/pydantic/activities.py @@ -0,0 +1,11 @@ +from typing import List + +from temporalio import activity +from tests.contrib.pydantic.models import PydanticModels + + +@activity.defn +async def pydantic_models_activity( + models: List[PydanticModels], +) -> List[PydanticModels]: + return models diff --git a/tests/contrib/test_pydantic.py b/tests/contrib/pydantic/models.py similarity index 70% rename from tests/contrib/test_pydantic.py rename to tests/contrib/pydantic/models.py index e5766d0e4..4d9d8ae87 100644 --- a/tests/contrib/test_pydantic.py +++ b/tests/contrib/pydantic/models.py @@ -26,14 +26,9 @@ ) from annotated_types import Len -from pydantic import BaseModel, Field, WithJsonSchema, create_model +from pydantic import BaseModel, Field, WithJsonSchema from typing_extensions import TypedDict -from temporalio import activity, workflow -from temporalio.client import Client -from 
temporalio.contrib.pydantic import pydantic_data_converter -from temporalio.worker import Worker - SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) ShortSequence = Annotated[SequenceType, Len(max_length=2)] @@ -573,21 +568,6 @@ def make_pydantic_timedelta_object() -> PydanticTimedeltaModel: ) -PydanticModels = Union[ - StandardTypesModel, - ComplexTypesModel, - SpecialTypesModel, - ParentModel, - FieldFeaturesModel, - AnnotatedFieldsModel, - GenericModel[Any], - UnionModel, - PydanticDatetimeModel, - PydanticDateModel, - PydanticTimedeltaModel, -] - - def _assert_datetime_validity(dt: datetime): assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) @@ -603,6 +583,21 @@ def _assert_timedelta_validity(td: timedelta): assert issubclass(td.__class__, timedelta) +PydanticModels = Union[ + StandardTypesModel, + ComplexTypesModel, + SpecialTypesModel, + ParentModel, + FieldFeaturesModel, + AnnotatedFieldsModel, + GenericModel[Any], + UnionModel, + PydanticDatetimeModel, + PydanticDateModel, + PydanticTimedeltaModel, +] + + def make_list_of_pydantic_objects() -> List[PydanticModels]: objects = [ make_standard_types_object(), @@ -622,126 +617,6 @@ def make_list_of_pydantic_objects() -> List[PydanticModels]: return objects # type: ignore -@activity.defn -async def pydantic_models_activity( - models: List[PydanticModels], -) -> List[PydanticModels]: - return models - - -@workflow.defn -class InstantiateModelsWorkflow: - @workflow.run - async def run(self) -> None: - make_list_of_pydantic_objects() - - -@workflow.defn -class RoundTripObjectsWorkflow: - @workflow.run - async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: - return await workflow.execute_activity( - pydantic_models_activity, - objects, - start_to_close_timeout=timedelta(minutes=1), - ) - - -def clone_objects(objects: List[PydanticModels]) -> List[PydanticModels]: - new_objects = [] - for o in objects: - fields = {} - for name, f in o.model_fields.items(): - fields[name] = (f.annotation, f) - model = create_model(o.__class__.__name__, **fields) # type: ignore - new_objects.append(model(**o.model_dump(by_alias=True))) - for old, new in zip(objects, new_objects): - assert old.model_dump() == new.model_dump() - return new_objects - - -@workflow.defn -class CloneObjectsWorkflow: - @workflow.run - async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: - return clone_objects(objects) - - -async def test_instantiation_outside_sandbox(): - make_list_of_pydantic_objects() - - -async def test_instantiation_inside_sandbox(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[InstantiateModelsWorkflow], - ): - await client.execute_workflow( - InstantiateModelsWorkflow.run, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - - -async def test_round_trip_pydantic_objects(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_objects = make_list_of_pydantic_objects() - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[RoundTripObjectsWorkflow], - activities=[pydantic_models_activity], - ): - returned_objects = await client.execute_workflow( - RoundTripObjectsWorkflow.run, - orig_objects, - id=str(uuid.uuid4()), - 
task_queue=task_queue_name, - ) - assert returned_objects == orig_objects - for o in returned_objects: - o._check_instance() - - -async def test_clone_objects_outside_sandbox(): - clone_objects(make_list_of_pydantic_objects()) - - -async def test_clone_objects_in_sandbox(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_objects = make_list_of_pydantic_objects() - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[CloneObjectsWorkflow], - ): - returned_objects = await client.execute_workflow( - CloneObjectsWorkflow.run, - orig_objects, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - assert returned_objects == orig_objects - for o in returned_objects: - o._check_instance() - - @dataclasses.dataclass(order=True) class MyDataClass: # The name int_field also occurs in StandardTypesModel and currently unions can match them up incorrectly. @@ -753,170 +628,4 @@ def make_dataclass_objects() -> List[MyDataClass]: ComplexCustomType = Tuple[List[MyDataClass], List[PydanticModels]] - - -@workflow.defn -class ComplexCustomTypeWorkflow: - @workflow.run - async def run( - self, - input: ComplexCustomType, - ) -> ComplexCustomType: - data_classes, pydantic_objects = input - pydantic_objects = await workflow.execute_activity( - pydantic_models_activity, - pydantic_objects, - start_to_close_timeout=timedelta(minutes=1), - ) - return data_classes, pydantic_objects - - -async def test_complex_custom_type(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_dataclass_objects = make_dataclass_objects() - orig_pydantic_objects = make_list_of_pydantic_objects() - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[ComplexCustomTypeWorkflow], - activities=[pydantic_models_activity], - ): - ( - returned_dataclass_objects, - returned_pydantic_objects, - ) = await client.execute_workflow( - ComplexCustomTypeWorkflow.run, - (orig_dataclass_objects, orig_pydantic_objects), - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - assert orig_dataclass_objects == returned_dataclass_objects - assert orig_pydantic_objects == returned_pydantic_objects - for o in returned_pydantic_objects: - o._check_instance() - - ComplexCustomUnionType = List[Union[MyDataClass, PydanticModels]] - - -@workflow.defn -class ComplexCustomUnionTypeWorkflow: - @workflow.run - async def run( - self, - input: ComplexCustomUnionType, - ) -> ComplexCustomUnionType: - data_classes = [] - pydantic_objects: List[PydanticModels] = [] - for o in input: - if dataclasses.is_dataclass(o): - data_classes.append(o) - elif isinstance(o, BaseModel): - pydantic_objects.append(o) - else: - raise TypeError(f"Unexpected type: {type(o)}") - pydantic_objects = await workflow.execute_activity( - pydantic_models_activity, - pydantic_objects, - start_to_close_timeout=timedelta(minutes=1), - ) - return data_classes + pydantic_objects # type: ignore - - -async def test_complex_custom_union_type(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - orig_dataclass_objects = make_dataclass_objects() - orig_pydantic_objects = make_list_of_pydantic_objects() - orig_objects = orig_dataclass_objects + orig_pydantic_objects - import 
random - - random.shuffle(orig_objects) - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[ComplexCustomUnionTypeWorkflow], - activities=[pydantic_models_activity], - ): - returned_objects = await client.execute_workflow( - ComplexCustomUnionTypeWorkflow.run, - orig_objects, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - returned_dataclass_objects, returned_pydantic_objects = [], [] - for o in returned_objects: - if isinstance(o, MyDataClass): - returned_dataclass_objects.append(o) - elif isinstance(o, BaseModel): - returned_pydantic_objects.append(o) - else: - raise TypeError(f"Unexpected type: {type(o)}") - assert sorted(orig_dataclass_objects) == sorted(returned_dataclass_objects) - assert sorted(orig_pydantic_objects, key=lambda o: o.__class__.__name__) == sorted( - returned_pydantic_objects, key=lambda o: o.__class__.__name__ - ) - for o in returned_pydantic_objects: - o._check_instance() - - -@workflow.defn -class PydanticModelUsageWorkflow: - @workflow.run - async def run(self) -> None: - for o in make_list_of_pydantic_objects(): - o._check_instance() - - -async def test_pydantic_model_usage_in_workflow(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[PydanticModelUsageWorkflow], - ): - await client.execute_workflow( - PydanticModelUsageWorkflow.run, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) - - -@workflow.defn -class DatetimeUsageWorkflow: - @workflow.run - async def run(self) -> None: - dt = workflow.now() - assert isinstance(dt, datetime) - assert issubclass(dt.__class__, datetime) - - -async def test_datetime_usage_in_workflow(client: Client): - new_config = client.config() - new_config["data_converter"] = pydantic_data_converter - client = Client(**new_config) - task_queue_name = str(uuid.uuid4()) - - async with Worker( - client, - task_queue=task_queue_name, - workflows=[DatetimeUsageWorkflow], - ): - await client.execute_workflow( - DatetimeUsageWorkflow.run, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py new file mode 100644 index 000000000..d16367076 --- /dev/null +++ b/tests/contrib/pydantic/test_pydantic.py @@ -0,0 +1,205 @@ +import dataclasses +import uuid + +from pydantic import BaseModel + +from temporalio.client import Client +from temporalio.contrib.pydantic import pydantic_data_converter +from temporalio.worker import Worker +from tests.contrib.pydantic.models import ( + make_dataclass_objects, + make_list_of_pydantic_objects, +) +from tests.contrib.pydantic.workflows import ( + CloneObjectsWorkflow, + ComplexCustomTypeWorkflow, + ComplexCustomUnionTypeWorkflow, + DatetimeUsageWorkflow, + InstantiateModelsWorkflow, + PydanticModelUsageWorkflow, + RoundTripObjectsWorkflow, + clone_objects, + pydantic_models_activity, +) + + +async def test_instantiation_outside_sandbox(): + make_list_of_pydantic_objects() + + +async def test_instantiation_inside_sandbox(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[InstantiateModelsWorkflow], + ): + await client.execute_workflow( + InstantiateModelsWorkflow.run, + 
id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + + +async def test_round_trip_pydantic_objects(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_objects = make_list_of_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[RoundTripObjectsWorkflow], + activities=[pydantic_models_activity], + ): + returned_objects = await client.execute_workflow( + RoundTripObjectsWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert returned_objects == orig_objects + for o in returned_objects: + o._check_instance() + + +async def test_clone_objects_outside_sandbox(): + clone_objects(make_list_of_pydantic_objects()) + + +async def test_clone_objects_in_sandbox(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_objects = make_list_of_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[CloneObjectsWorkflow], + ): + returned_objects = await client.execute_workflow( + CloneObjectsWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert returned_objects == orig_objects + for o in returned_objects: + o._check_instance() + + +async def test_complex_custom_type(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_dataclass_objects = make_dataclass_objects() + orig_pydantic_objects = make_list_of_pydantic_objects() + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[ComplexCustomTypeWorkflow], + activities=[pydantic_models_activity], + ): + ( + returned_dataclass_objects, + returned_pydantic_objects, + ) = await client.execute_workflow( + ComplexCustomTypeWorkflow.run, + (orig_dataclass_objects, orig_pydantic_objects), + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert orig_dataclass_objects == returned_dataclass_objects + assert orig_pydantic_objects == returned_pydantic_objects + for o in returned_pydantic_objects: + o._check_instance() + + +async def test_complex_custom_union_type(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_dataclass_objects = make_dataclass_objects() + orig_pydantic_objects = make_list_of_pydantic_objects() + orig_objects = orig_dataclass_objects + orig_pydantic_objects + import random + + random.shuffle(orig_objects) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[ComplexCustomUnionTypeWorkflow], + activities=[pydantic_models_activity], + ): + returned_objects = await client.execute_workflow( + ComplexCustomUnionTypeWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + returned_dataclass_objects, returned_pydantic_objects = [], [] + for o in returned_objects: + if dataclasses.is_dataclass(o): + returned_dataclass_objects.append(o) + elif isinstance(o, BaseModel): + returned_pydantic_objects.append(o) + else: + raise TypeError(f"Unexpected type: {type(o)}") + assert sorted(orig_dataclass_objects) == sorted(returned_dataclass_objects) + assert sorted(orig_pydantic_objects, key=lambda o: 
o.__class__.__name__) == sorted( + returned_pydantic_objects, key=lambda o: o.__class__.__name__ + ) + for o in returned_pydantic_objects: + o._check_instance() + + +async def test_pydantic_model_usage_in_workflow(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[PydanticModelUsageWorkflow], + ): + await client.execute_workflow( + PydanticModelUsageWorkflow.run, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + + +async def test_datetime_usage_in_workflow(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[DatetimeUsageWorkflow], + ): + await client.execute_workflow( + DatetimeUsageWorkflow.run, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py new file mode 100644 index 000000000..4a2a99cb9 --- /dev/null +++ b/tests/contrib/pydantic/workflows.py @@ -0,0 +1,111 @@ +import dataclasses +from datetime import datetime, timedelta +from typing import List + +from pydantic import BaseModel, create_model + +from temporalio import workflow + +with workflow.unsafe.imports_passed_through(): + from tests.contrib.pydantic.activities import pydantic_models_activity + from tests.contrib.pydantic.models import ( + ComplexCustomType, + ComplexCustomUnionType, + PydanticModels, + make_list_of_pydantic_objects, + ) + + +def clone_objects(objects: List[PydanticModels]) -> List[PydanticModels]: + new_objects = [] + for o in objects: + fields = {} + for name, f in o.model_fields.items(): + fields[name] = (f.annotation, f) + model = create_model(o.__class__.__name__, **fields) # type: ignore + new_objects.append(model(**o.model_dump(by_alias=True))) + for old, new in zip(objects, new_objects): + assert old.model_dump() == new.model_dump() + return new_objects + + +@workflow.defn +class InstantiateModelsWorkflow: + @workflow.run + async def run(self) -> None: + make_list_of_pydantic_objects() + + +@workflow.defn +class RoundTripObjectsWorkflow: + @workflow.run + async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: + return await workflow.execute_activity( + pydantic_models_activity, + objects, + start_to_close_timeout=timedelta(minutes=1), + ) + + +@workflow.defn +class CloneObjectsWorkflow: + @workflow.run + async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: + return clone_objects(objects) + + +@workflow.defn +class ComplexCustomUnionTypeWorkflow: + @workflow.run + async def run( + self, + input: ComplexCustomUnionType, + ) -> ComplexCustomUnionType: + data_classes = [] + pydantic_objects: List[PydanticModels] = [] + for o in input: + if dataclasses.is_dataclass(o): + data_classes.append(o) + elif isinstance(o, BaseModel): + pydantic_objects.append(o) + else: + raise TypeError(f"Unexpected type: {type(o)}") + pydantic_objects = await workflow.execute_activity( + pydantic_models_activity, + pydantic_objects, + start_to_close_timeout=timedelta(minutes=1), + ) + return data_classes + pydantic_objects # type: ignore + + +@workflow.defn +class ComplexCustomTypeWorkflow: + @workflow.run + async def run( + self, + input: ComplexCustomType, + ) -> ComplexCustomType: + 
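The tests above apply the converter by copying an existing client's config; in application code the same converter would normally be supplied when connecting. A short sketch, where the address and namespace are placeholders and only data_converter is the point of interest:

    from temporalio.client import Client
    from temporalio.contrib.pydantic import pydantic_data_converter

    async def connect_with_pydantic() -> Client:
        # Workflows and activities started through this client can then accept
        # and return Pydantic models directly.
        return await Client.connect(
            "localhost:7233",
            namespace="default",
            data_converter=pydantic_data_converter,
        )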
data_classes, pydantic_objects = input + pydantic_objects = await workflow.execute_activity( + pydantic_models_activity, + pydantic_objects, + start_to_close_timeout=timedelta(minutes=1), + ) + return data_classes, pydantic_objects + + +@workflow.defn +class PydanticModelUsageWorkflow: + @workflow.run + async def run(self) -> None: + for o in make_list_of_pydantic_objects(): + o._check_instance() + + +@workflow.defn +class DatetimeUsageWorkflow: + @workflow.run + async def run(self) -> None: + dt = workflow.now() + assert isinstance(dt, datetime) + assert issubclass(dt.__class__, datetime) From 62b2ee52cc228a9948d25bb41d357e6e9c934dd7 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 11:47:37 -0500 Subject: [PATCH 68/96] Define some models inside the sandbox and some outside --- tests/contrib/pydantic/models.py | 315 ++-------------------------- tests/contrib/pydantic/models_2.py | 312 +++++++++++++++++++++++++++ tests/contrib/pydantic/workflows.py | 13 +- 3 files changed, 333 insertions(+), 307 deletions(-) create mode 100644 tests/contrib/pydantic/models_2.py diff --git a/tests/contrib/pydantic/models.py b/tests/contrib/pydantic/models.py index 4d9d8ae87..85c4d8c65 100644 --- a/tests/contrib/pydantic/models.py +++ b/tests/contrib/pydantic/models.py @@ -1,25 +1,13 @@ -import collections import dataclasses -import decimal -import fractions -import re -import uuid -from datetime import date, datetime, time, timedelta, timezone -from enum import Enum, IntEnum -from ipaddress import IPv4Address +from datetime import date, datetime, timedelta from pathlib import Path from typing import ( Annotated, Any, Dict, Generic, - Hashable, List, - NamedTuple, - Optional, - Pattern, Sequence, - Set, Tuple, TypeVar, Union, @@ -27,297 +15,22 @@ from annotated_types import Len from pydantic import BaseModel, Field, WithJsonSchema -from typing_extensions import TypedDict -SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) -ShortSequence = Annotated[SequenceType, Len(max_length=2)] - - -class FruitEnum(str, Enum): - apple = "apple" - banana = "banana" - - -class NumberEnum(IntEnum): - one = 1 - two = 2 - - -class UserTypedDict(TypedDict): - name: str - id: int - - -class TypedDictModel(BaseModel): - typed_dict_field: UserTypedDict - - def _check_instance(self) -> None: - assert isinstance(self.typed_dict_field, dict) - assert self.typed_dict_field == {"name": "username", "id": 7} - - -def make_typed_dict_object() -> TypedDictModel: - return TypedDictModel(typed_dict_field={"name": "username", "id": 7}) - - -class StandardTypesModel(BaseModel): - # Boolean - bool_field: bool - bool_field_int: bool - bool_field_str: bool - - # Numbers - int_field: int - float_field: float - decimal_field: decimal.Decimal - complex_field: complex - fraction_field: fractions.Fraction - - # Strings and Bytes - str_field: str - bytes_field: bytes - - # None - none_field: None - - # Enums - str_enum_field: FruitEnum - int_enum_field: NumberEnum - - # Collections - list_field: list - tuple_field: tuple - set_field: set - frozenset_field: frozenset - deque_field: collections.deque - sequence_field: Sequence[int] - # Iterable[int] supported but not tested since original vs round-tripped do not compare equal - - # Mappings - dict_field: dict - # defaultdict_field: collections.defaultdict - counter_field: collections.Counter - typed_dict_field: UserTypedDict - - # Other Types - pattern_field: Pattern - hashable_field: Hashable - any_field: Any - # callable_field: Callable - - def _check_instance(self) -> None: - 
# Boolean checks - assert isinstance(self.bool_field, bool) - assert self.bool_field is True - assert isinstance(self.bool_field_int, bool) - assert self.bool_field_int is True - assert isinstance(self.bool_field_str, bool) - assert self.bool_field_str is True - - # Number checks - assert isinstance(self.int_field, int) - assert self.int_field == 42 - assert isinstance(self.float_field, float) - assert self.float_field == 3.14 - assert isinstance(self.decimal_field, decimal.Decimal) - assert self.decimal_field == decimal.Decimal("3.14") - assert isinstance(self.complex_field, complex) - assert self.complex_field == complex(1, 2) - assert isinstance(self.fraction_field, fractions.Fraction) - assert self.fraction_field == fractions.Fraction(22, 7) - - # String and Bytes checks - assert isinstance(self.str_field, str) - assert self.str_field == "hello" - assert isinstance(self.bytes_field, bytes) - assert self.bytes_field == b"world" - - # None check - assert self.none_field is None - - # Enum checks - assert isinstance(self.str_enum_field, Enum) - assert isinstance(self.int_enum_field, IntEnum) - - # Collection checks - assert isinstance(self.list_field, list) - assert self.list_field == [1, 2, 3] - assert isinstance(self.tuple_field, tuple) - assert self.tuple_field == (1, 2, 3) - assert isinstance(self.set_field, set) - assert self.set_field == {1, 2, 3} - assert isinstance(self.frozenset_field, frozenset) - assert self.frozenset_field == frozenset([1, 2, 3]) - assert isinstance(self.deque_field, collections.deque) - assert list(self.deque_field) == [1, 2, 3] - assert isinstance(self.sequence_field, list) - assert list(self.sequence_field) == [1, 2, 3] - - # Mapping checks - assert isinstance(self.dict_field, dict) - assert self.dict_field == {"a": 1, "b": 2} - # assert isinstance(self.defaultdict_field, collections.defaultdict) - # assert dict(self.defaultdict_field) == {"a": 1, "b": 2} - assert isinstance(self.counter_field, collections.Counter) - assert dict(self.counter_field) == {"a": 1, "b": 2} - assert isinstance(self.typed_dict_field, dict) - assert self.typed_dict_field == {"name": "username", "id": 7} - - # Other type checks - assert isinstance(self.pattern_field, Pattern) - assert self.pattern_field.pattern == r"\d+" - assert isinstance(self.hashable_field, Hashable) - assert self.hashable_field == "test" - assert self.any_field == "anything goes" - # assert callable(self.callable_field) - - -def make_standard_types_object() -> StandardTypesModel: - return StandardTypesModel( - # Boolean - bool_field=True, - bool_field_int=1, # type: ignore - bool_field_str="true", # type: ignore - # Numbers - int_field=42, - float_field=3.14, - decimal_field=decimal.Decimal("3.14"), - complex_field=complex(1, 2), - fraction_field=fractions.Fraction(22, 7), - # Strings and Bytes - str_field="hello", - bytes_field=b"world", - # None - none_field=None, - # Enums - str_enum_field=FruitEnum.apple, - int_enum_field=NumberEnum.one, - # Collections - # these cast input to list, tuple, set, etc. 
- list_field={1, 2, 3}, # type: ignore - tuple_field=(1, 2, 3), - set_field={1, 2, 3}, - frozenset_field=frozenset([1, 2, 3]), - deque_field=collections.deque([1, 2, 3]), - # other sequence types are converted to list, as documented - sequence_field=[1, 2, 3], - # Mappings - dict_field={"a": 1, "b": 2}, - # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), - counter_field=collections.Counter({"a": 1, "b": 2}), - typed_dict_field={"name": "username", "id": 7}, - # Other Types - pattern_field=re.compile(r"\d+"), - hashable_field="test", - any_field="anything goes", - # callable_field=lambda x: x, +from temporalio import workflow + +# Define some of the models outside the sandbox +with workflow.unsafe.imports_passed_through(): + from tests.contrib.pydantic.models_2 import ( + ComplexTypesModel, + SpecialTypesModel, + StandardTypesModel, + make_complex_types_object, + make_special_types_object, + make_standard_types_object, ) - -class Point(NamedTuple): - x: int - y: int - - -class ComplexTypesModel(BaseModel): - list_field: List[str] - dict_field: Dict[str, int] - set_field: Set[int] - tuple_field: Tuple[str, int] - union_field: Union[str, int] - optional_field: Optional[str] - named_tuple_field: Point - - def _check_instance(self) -> None: - assert isinstance(self.list_field, list) - assert isinstance(self.dict_field, dict) - assert isinstance(self.set_field, set) - assert isinstance(self.tuple_field, tuple) - assert isinstance(self.union_field, str) - assert isinstance(self.optional_field, str) - assert self.list_field == ["a", "b", "c"] - assert self.dict_field == {"x": 1, "y": 2} - assert self.set_field == {1, 2, 3} - assert self.tuple_field == ("hello", 42) - assert self.union_field == "string_or_int" - assert self.optional_field == "present" - assert self.named_tuple_field == Point(x=1, y=2) - - -def make_complex_types_object() -> ComplexTypesModel: - return ComplexTypesModel( - list_field=["a", "b", "c"], - dict_field={"x": 1, "y": 2}, - set_field={1, 2, 3}, - tuple_field=("hello", 42), - union_field="string_or_int", - optional_field="present", - named_tuple_field=Point(x=1, y=2), - ) - - -class SpecialTypesModel(BaseModel): - datetime_field: datetime - datetime_field_int: datetime - datetime_field_float: datetime - datetime_field_str_formatted: datetime - datetime_field_str_int: datetime - datetime_field_date: datetime - - time_field: time - time_field_str: time - - date_field: date - timedelta_field: timedelta - path_field: Path - uuid_field: uuid.UUID - ip_field: IPv4Address - - def _check_instance(self) -> None: - dt = datetime(2000, 1, 2, 3, 4, 5) - dtz = datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) - assert isinstance(self.datetime_field, datetime) - assert isinstance(self.datetime_field_int, datetime) - assert isinstance(self.datetime_field_float, datetime) - assert isinstance(self.datetime_field_str_formatted, datetime) - assert isinstance(self.datetime_field_str_int, datetime) - assert isinstance(self.datetime_field_date, datetime) - assert isinstance(self.timedelta_field, timedelta) - assert isinstance(self.path_field, Path) - assert isinstance(self.uuid_field, uuid.UUID) - assert isinstance(self.ip_field, IPv4Address) - assert self.datetime_field == dt - assert self.datetime_field_int == dtz - assert self.datetime_field_float == dtz - assert self.datetime_field_str_formatted == dtz - assert self.datetime_field_str_int == dtz - assert self.datetime_field_date == datetime(2000, 1, 2) - assert self.time_field == time(3, 4, 5) - assert self.time_field_str 
== time(3, 4, 5, tzinfo=timezone.utc) - assert self.date_field == date(2000, 1, 2) - assert self.timedelta_field == timedelta(days=1, hours=2) - assert self.path_field == Path("test/path") - assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") - assert self.ip_field == IPv4Address("127.0.0.1") - - -def make_special_types_object() -> SpecialTypesModel: - return SpecialTypesModel( - datetime_field=datetime(2000, 1, 2, 3, 4, 5), - # 946800245 - datetime_field_int=946782245, # type: ignore - datetime_field_float=946782245.0, # type: ignore - datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore - datetime_field_str_int="946782245", # type: ignore - datetime_field_date=datetime(2000, 1, 2), - time_field=time(3, 4, 5), - time_field_str="03:04:05Z", # type: ignore - date_field=date(2000, 1, 2), - timedelta_field=timedelta(days=1, hours=2), - path_field=Path("test/path"), - uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), - ip_field=IPv4Address("127.0.0.1"), - ) +SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) +ShortSequence = Annotated[SequenceType, Len(max_length=2)] class ChildModel(BaseModel): diff --git a/tests/contrib/pydantic/models_2.py b/tests/contrib/pydantic/models_2.py new file mode 100644 index 000000000..51f213d8a --- /dev/null +++ b/tests/contrib/pydantic/models_2.py @@ -0,0 +1,312 @@ +import collections +import decimal +import fractions +import re +import uuid +from datetime import date, datetime, time, timedelta, timezone +from enum import Enum, IntEnum +from ipaddress import IPv4Address +from pathlib import Path +from typing import ( + Any, + Dict, + Hashable, + List, + NamedTuple, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Union, +) + +from pydantic import BaseModel +from typing_extensions import TypedDict + + +class FruitEnum(str, Enum): + apple = "apple" + banana = "banana" + + +class NumberEnum(IntEnum): + one = 1 + two = 2 + + +class UserTypedDict(TypedDict): + name: str + id: int + + +class TypedDictModel(BaseModel): + typed_dict_field: UserTypedDict + + def _check_instance(self) -> None: + assert isinstance(self.typed_dict_field, dict) + assert self.typed_dict_field == {"name": "username", "id": 7} + + +def make_typed_dict_object() -> TypedDictModel: + return TypedDictModel(typed_dict_field={"name": "username", "id": 7}) + + +class StandardTypesModel(BaseModel): + # Boolean + bool_field: bool + bool_field_int: bool + bool_field_str: bool + + # Numbers + int_field: int + float_field: float + decimal_field: decimal.Decimal + complex_field: complex + fraction_field: fractions.Fraction + + # Strings and Bytes + str_field: str + bytes_field: bytes + + # None + none_field: None + + # Enums + str_enum_field: FruitEnum + int_enum_field: NumberEnum + + # Collections + list_field: list + tuple_field: tuple + set_field: set + frozenset_field: frozenset + deque_field: collections.deque + sequence_field: Sequence[int] + # Iterable[int] supported but not tested since original vs round-tripped do not compare equal + + # Mappings + dict_field: dict + # defaultdict_field: collections.defaultdict + counter_field: collections.Counter + typed_dict_field: UserTypedDict + + # Other Types + pattern_field: Pattern + hashable_field: Hashable + any_field: Any + # callable_field: Callable + + def _check_instance(self) -> None: + # Boolean checks + assert isinstance(self.bool_field, bool) + assert self.bool_field is True + assert isinstance(self.bool_field_int, bool) + assert self.bool_field_int is True + assert 
isinstance(self.bool_field_str, bool) + assert self.bool_field_str is True + + # Number checks + assert isinstance(self.int_field, int) + assert self.int_field == 42 + assert isinstance(self.float_field, float) + assert self.float_field == 3.14 + assert isinstance(self.decimal_field, decimal.Decimal) + assert self.decimal_field == decimal.Decimal("3.14") + assert isinstance(self.complex_field, complex) + assert self.complex_field == complex(1, 2) + assert isinstance(self.fraction_field, fractions.Fraction) + assert self.fraction_field == fractions.Fraction(22, 7) + + # String and Bytes checks + assert isinstance(self.str_field, str) + assert self.str_field == "hello" + assert isinstance(self.bytes_field, bytes) + assert self.bytes_field == b"world" + + # None check + assert self.none_field is None + + # Enum checks + assert isinstance(self.str_enum_field, Enum) + assert isinstance(self.int_enum_field, IntEnum) + + # Collection checks + assert isinstance(self.list_field, list) + assert self.list_field == [1, 2, 3] + assert isinstance(self.tuple_field, tuple) + assert self.tuple_field == (1, 2, 3) + assert isinstance(self.set_field, set) + assert self.set_field == {1, 2, 3} + assert isinstance(self.frozenset_field, frozenset) + assert self.frozenset_field == frozenset([1, 2, 3]) + assert isinstance(self.deque_field, collections.deque) + assert list(self.deque_field) == [1, 2, 3] + assert isinstance(self.sequence_field, list) + assert list(self.sequence_field) == [1, 2, 3] + + # Mapping checks + assert isinstance(self.dict_field, dict) + assert self.dict_field == {"a": 1, "b": 2} + # assert isinstance(self.defaultdict_field, collections.defaultdict) + # assert dict(self.defaultdict_field) == {"a": 1, "b": 2} + assert isinstance(self.counter_field, collections.Counter) + assert dict(self.counter_field) == {"a": 1, "b": 2} + assert isinstance(self.typed_dict_field, dict) + assert self.typed_dict_field == {"name": "username", "id": 7} + + # Other type checks + assert isinstance(self.pattern_field, Pattern) + assert self.pattern_field.pattern == r"\d+" + assert isinstance(self.hashable_field, Hashable) + assert self.hashable_field == "test" + assert self.any_field == "anything goes" + # assert callable(self.callable_field) + + +def make_standard_types_object() -> StandardTypesModel: + return StandardTypesModel( + # Boolean + bool_field=True, + bool_field_int=1, # type: ignore + bool_field_str="true", # type: ignore + # Numbers + int_field=42, + float_field=3.14, + decimal_field=decimal.Decimal("3.14"), + complex_field=complex(1, 2), + fraction_field=fractions.Fraction(22, 7), + # Strings and Bytes + str_field="hello", + bytes_field=b"world", + # None + none_field=None, + # Enums + str_enum_field=FruitEnum.apple, + int_enum_field=NumberEnum.one, + # Collections + # these cast input to list, tuple, set, etc. 
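As the comment above notes, Pydantic's default (lax) validation coerces compatible collection inputs, which is why a set literal is accepted for the plain list field. A tiny standalone sketch of that behavior, separate from the test models:

    from pydantic import BaseModel

    class CoercionExample(BaseModel):
        list_field: list
        tuple_field: tuple

    # Lax mode converts the set to a list and the list to a tuple.
    obj = CoercionExample(list_field={1, 2, 3}, tuple_field=[1, 2, 3])
    assert isinstance(obj.list_field, list)
    assert isinstance(obj.tuple_field, tuple)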
+ list_field={1, 2, 3}, # type: ignore + tuple_field=(1, 2, 3), + set_field={1, 2, 3}, + frozenset_field=frozenset([1, 2, 3]), + deque_field=collections.deque([1, 2, 3]), + # other sequence types are converted to list, as documented + sequence_field=[1, 2, 3], + # Mappings + dict_field={"a": 1, "b": 2}, + # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), + counter_field=collections.Counter({"a": 1, "b": 2}), + typed_dict_field={"name": "username", "id": 7}, + # Other Types + pattern_field=re.compile(r"\d+"), + hashable_field="test", + any_field="anything goes", + # callable_field=lambda x: x, + ) + + +class Point(NamedTuple): + x: int + y: int + + +class ComplexTypesModel(BaseModel): + list_field: List[str] + dict_field: Dict[str, int] + set_field: Set[int] + tuple_field: Tuple[str, int] + union_field: Union[str, int] + optional_field: Optional[str] + named_tuple_field: Point + + def _check_instance(self) -> None: + assert isinstance(self.list_field, list) + assert isinstance(self.dict_field, dict) + assert isinstance(self.set_field, set) + assert isinstance(self.tuple_field, tuple) + assert isinstance(self.union_field, str) + assert isinstance(self.optional_field, str) + assert self.list_field == ["a", "b", "c"] + assert self.dict_field == {"x": 1, "y": 2} + assert self.set_field == {1, 2, 3} + assert self.tuple_field == ("hello", 42) + assert self.union_field == "string_or_int" + assert self.optional_field == "present" + assert self.named_tuple_field == Point(x=1, y=2) + + +def make_complex_types_object() -> ComplexTypesModel: + return ComplexTypesModel( + list_field=["a", "b", "c"], + dict_field={"x": 1, "y": 2}, + set_field={1, 2, 3}, + tuple_field=("hello", 42), + union_field="string_or_int", + optional_field="present", + named_tuple_field=Point(x=1, y=2), + ) + + +class SpecialTypesModel(BaseModel): + datetime_field: datetime + datetime_field_int: datetime + datetime_field_float: datetime + datetime_field_str_formatted: datetime + datetime_field_str_int: datetime + datetime_field_date: datetime + + time_field: time + time_field_str: time + + date_field: date + timedelta_field: timedelta + path_field: Path + uuid_field: uuid.UUID + ip_field: IPv4Address + + def _check_instance(self) -> None: + dt = datetime(2000, 1, 2, 3, 4, 5) + dtz = datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) + assert isinstance(self.datetime_field, datetime) + assert isinstance(self.datetime_field_int, datetime) + assert isinstance(self.datetime_field_float, datetime) + assert isinstance(self.datetime_field_str_formatted, datetime) + assert isinstance(self.datetime_field_str_int, datetime) + assert isinstance(self.datetime_field_date, datetime) + assert isinstance(self.timedelta_field, timedelta) + assert isinstance(self.path_field, Path) + assert isinstance(self.uuid_field, uuid.UUID) + assert isinstance(self.ip_field, IPv4Address) + assert self.datetime_field == dt + assert self.datetime_field_int == dtz + assert self.datetime_field_float == dtz + assert self.datetime_field_str_formatted == dtz + assert self.datetime_field_str_int == dtz + assert self.datetime_field_date == datetime(2000, 1, 2) + assert self.time_field == time(3, 4, 5) + assert self.time_field_str == time(3, 4, 5, tzinfo=timezone.utc) + assert self.date_field == date(2000, 1, 2) + assert self.timedelta_field == timedelta(days=1, hours=2) + assert self.path_field == Path("test/path") + assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") + assert self.ip_field == IPv4Address("127.0.0.1") + + +def 
make_special_types_object() -> SpecialTypesModel: + return SpecialTypesModel( + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + # 946800245 + datetime_field_int=946782245, # type: ignore + datetime_field_float=946782245.0, # type: ignore + datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore + datetime_field_str_int="946782245", # type: ignore + datetime_field_date=datetime(2000, 1, 2), + time_field=time(3, 4, 5), + time_field_str="03:04:05Z", # type: ignore + date_field=date(2000, 1, 2), + timedelta_field=timedelta(days=1, hours=2), + path_field=Path("test/path"), + uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), + ip_field=IPv4Address("127.0.0.1"), + ) diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index 4a2a99cb9..1b1bc359a 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -8,12 +8,13 @@ with workflow.unsafe.imports_passed_through(): from tests.contrib.pydantic.activities import pydantic_models_activity - from tests.contrib.pydantic.models import ( - ComplexCustomType, - ComplexCustomUnionType, - PydanticModels, - make_list_of_pydantic_objects, - ) + +from tests.contrib.pydantic.models import ( + ComplexCustomType, + ComplexCustomUnionType, + PydanticModels, + make_list_of_pydantic_objects, +) def clone_objects(objects: List[PydanticModels]) -> List[PydanticModels]: From b2e34878e59bacca423dfc19dc978406a528ba35 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 12:06:53 -0500 Subject: [PATCH 69/96] lint --- tests/contrib/pydantic/test_pydantic.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index d16367076..f9a5c0127 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -153,7 +153,8 @@ async def test_complex_custom_union_type(client: Client): id=str(uuid.uuid4()), task_queue=task_queue_name, ) - returned_dataclass_objects, returned_pydantic_objects = [], [] + returned_dataclass_objects = [] + returned_pydantic_objects: list[BaseModel] = [] for o in returned_objects: if dataclasses.is_dataclass(o): returned_dataclass_objects.append(o) @@ -161,12 +162,14 @@ async def test_complex_custom_union_type(client: Client): returned_pydantic_objects.append(o) else: raise TypeError(f"Unexpected type: {type(o)}") - assert sorted(orig_dataclass_objects) == sorted(returned_dataclass_objects) + assert sorted(orig_dataclass_objects, key=lambda o: o.__class__.__name__) == sorted( + returned_dataclass_objects, key=lambda o: o.__class__.__name__ + ) assert sorted(orig_pydantic_objects, key=lambda o: o.__class__.__name__) == sorted( returned_pydantic_objects, key=lambda o: o.__class__.__name__ ) - for o in returned_pydantic_objects: - o._check_instance() + for o2 in returned_pydantic_objects: + o2._check_instance() # type: ignore async def test_pydantic_model_usage_in_workflow(client: Client): From a5442248c2b3176082adff43b157c41bc9ed1ec7 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 12:17:51 -0500 Subject: [PATCH 70/96] Warn pydantic users who are not using contrib.pydantic --- temporalio/converter.py | 42 +++++++++++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 14 deletions(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index 9e3e929e6..00571e1e9 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -10,6 +10,7 @@ import sys import traceback 
import uuid +import warnings from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime @@ -486,8 +487,11 @@ def from_payload( class AdvancedJSONEncoder(json.JSONEncoder): """Advanced JSON encoder. - This encoder supports dataclasses, classes with dict() functions, and - all iterables as lists. + This encoder supports dataclasses and all iterables as lists. + + It also uses Pydantic's "model_dump" or "dict" methods if available on the + object, but with a warning that the dedicated pydantic data converter should + be used. """ def default(self, o: Any) -> Any: @@ -498,10 +502,16 @@ def default(self, o: Any) -> Any: # Dataclass support if dataclasses.is_dataclass(o): return dataclasses.asdict(o) - # Support for models with "dict" function like Pydantic - dict_fn = getattr(o, "dict", None) - if callable(dict_fn): - return dict_fn() + + # Deprecated support for Pydantic model instances + for pydantic_attr in ["model_dump", "dict"]: + to_dict = getattr(o, pydantic_attr, None) + if callable(to_dict): + warnings.warn( + "It looks like you're using pydantic. Please use temporalio.contrib.pydantic.converter.pydantic_data_converter." + ) + return to_dict() + # Support for non-list iterables like set if not isinstance(o, list) and isinstance(o, collections.abc.Iterable): return list(o) @@ -1523,15 +1533,19 @@ def value_to_type( # the start of this function. We retain the following for backwards # compatibility with pydantic users who are not using contrib.pydantic, but # this is deprecated. - parse_obj_attr = inspect.getattr_static(hint, "parse_obj", None) - if isinstance(parse_obj_attr, classmethod) or isinstance( - parse_obj_attr, staticmethod - ): - if not isinstance(value, dict): - raise TypeError( - f"Cannot convert to {hint}, value is {type(value)} not dict" + for pydantic_attr in ["model_validate", "parse_obj"]: + pydantic_method = inspect.getattr_static(hint, pydantic_attr, None) + if isinstance(pydantic_method, classmethod) or isinstance( + pydantic_method, staticmethod + ): + if not isinstance(value, dict): + raise TypeError( + f"Cannot convert to {hint}, value is {type(value)} not dict" + ) + warnings.warn( + "It looks like you're using pydantic. Please use temporalio.contrib.pydantic.converter.pydantic_data_converter." ) - return getattr(hint, "parse_obj")(value) + return getattr(hint, pydantic_attr)(value) # IntEnum if inspect.isclass(hint) and issubclass(hint, IntEnum): From 4d109c71576348f0becf68096fe7dde7025bfa0e Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 12:36:07 -0500 Subject: [PATCH 71/96] Cleanup --- temporalio/contrib/pydantic.py | 5 +---- temporalio/worker/workflow_sandbox/_restrictions.py | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 02c6dcd78..6a6938a99 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -12,10 +12,7 @@ """ import inspect -from typing import ( - Any, - Type, -) +from typing import Any, Type import pydantic diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 85049a5ea..62e5894ea 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -962,7 +962,7 @@ def _is_restrictable(v: Any) -> bool: str, bytes, bytearray, - datetime.date, # from which datetime.datetime inherits + datetime.date, # e.g. 
datetime.datetime ), ) From 90ce448405146d83b84f3a8d51683341992d0ac6 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 14:07:35 -0500 Subject: [PATCH 72/96] Support pydantic v1 and v2 --- poetry.lock | 6 +----- pyproject.toml | 3 +-- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index b48edfa5b..62fa9f8fc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,7 +7,6 @@ description = "Reusable constraint types to use with typing.Annotated" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"pydantic\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -1084,7 +1083,6 @@ description = "Data validation using Python type hints" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"pydantic\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1106,7 +1104,6 @@ description = "Core functionality for Pydantic validation and serialization" optional = true python-versions = ">=3.8" groups = ["main"] -markers = "extra == \"pydantic\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1904,9 +1901,8 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] grpc = ["grpcio"] opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] -pydantic = ["pydantic"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "595afb046373e5c826930cf0f1bc112f70cbfae14c72775c33221e96d66fc869" +content-hash = "6e479ea624b39564fb33a40ce6ad8f9139a9a8849182251bf84942fe456e36b0" diff --git a/pyproject.toml b/pyproject.toml index 740a03850..f4f0b1352 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ grpcio = {version = "^1.48.2", optional = true} opentelemetry-api = { version = "^1.11.1", optional = true } opentelemetry-sdk = { version = "^1.11.1", optional = true } protobuf = ">=3.20" -pydantic = { version = "^2.10.6", optional = true } +pydantic = { version = ">=1.10.0,<3.0.0", optional = true } python = "^3.9" python-dateutil = { version = "^2.8.2", python = "<3.11" } types-protobuf = ">=3.20" @@ -63,7 +63,6 @@ wheel = "^0.42.0" [tool.poetry.extras] opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] grpc = ["grpcio"] -pydantic = ["pydantic"] [tool.poetry.group.dev.dependencies] ruff = "^0.5.0" From be3b8f8c9d31eaaff2b4bbb9950d400bcefcdd61 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 17:17:39 -0500 Subject: [PATCH 73/96] Drop v1 support in data converter --- temporalio/contrib/pydantic.py | 28 ++++++---------------------- temporalio/converter.py | 6 +++--- 2 files changed, 9 insertions(+), 25 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 6a6938a99..9a906552b 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -1,4 +1,4 @@ -"""A data converter for Pydantic models +"""A data 
converter for Pydantic v2. To use, pass ``pydantic_data_converter`` as the ``data_converter`` argument to :py:class:`temporalio.client.Client`: @@ -9,18 +9,15 @@ data_converter=pydantic_data_converter, ... ) + +Pydantic v1 is not supported. """ import inspect from typing import Any, Type import pydantic - -try: - from pydantic_core import to_jsonable_python -except ImportError: - # pydantic v1 - from pydantic.json import pydantic_encoder as to_jsonable_python # type: ignore +from pydantic_core import to_jsonable_python from temporalio.converter import ( AdvancedJSONEncoder, @@ -39,23 +36,10 @@ class PydanticModelTypeConverter(JSONTypeConverter): """Type converter for pydantic model instances.""" def to_typed_value(self, hint: Type, value: Any) -> Any: - """Convert dict value to pydantic model instance of the specified type""" + """Convert value to pydantic model instance of the specified type""" if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): return JSONTypeConverter.Unhandled - model = hint - if not isinstance(value, dict): - raise TypeError( - f"Cannot convert to {model}, value is {type(value)} not dict" - ) - if hasattr(model, "model_validate"): - return model.model_validate(value) - elif hasattr(model, "parse_obj"): - # pydantic v1 - return model.parse_obj(value) - else: - raise ValueError( - f"{model} is a Pydantic model but does not have a `model_validate` or `parse_obj` method" - ) + return hint.model_validate(value) class PydanticJSONEncoder(AdvancedJSONEncoder): diff --git a/temporalio/converter.py b/temporalio/converter.py index 00571e1e9..973fac2a4 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -489,9 +489,9 @@ class AdvancedJSONEncoder(json.JSONEncoder): This encoder supports dataclasses and all iterables as lists. - It also uses Pydantic's "model_dump" or "dict" methods if available on the - object, but with a warning that the dedicated pydantic data converter should - be used. + It also uses Pydantic v1's "dict" methods if available on the object, + but this is deprecated. Pydantic users should upgrade to v2 and use + temporalio.contrib.pydantic.pydantic_data_converter. """ def default(self, o: Any) -> Any: From d3b525a8fadceaea133e965b572442aa9dd6f335 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 17:23:30 -0500 Subject: [PATCH 74/96] Drop v2 support outside contrib --- temporalio/converter.py | 47 ++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 26 deletions(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index 973fac2a4..2a4da1e4b 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -502,16 +502,10 @@ def default(self, o: Any) -> Any: # Dataclass support if dataclasses.is_dataclass(o): return dataclasses.asdict(o) - - # Deprecated support for Pydantic model instances - for pydantic_attr in ["model_dump", "dict"]: - to_dict = getattr(o, pydantic_attr, None) - if callable(to_dict): - warnings.warn( - "It looks like you're using pydantic. Please use temporalio.contrib.pydantic.converter.pydantic_data_converter." 
- ) - return to_dict() - + # Support for Pydantic v1's dict method + dict_fn = getattr(o, "dict", None) + if callable(dict_fn): + return dict_fn() # Support for non-list iterables like set if not isinstance(o, list) and isinstance(o, collections.abc.Iterable): return list(o) @@ -566,7 +560,13 @@ def encoding(self) -> str: def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" - # Let JSON conversion errors be thrown to caller + # Check for Pydantic v1 + if hasattr(value, "parse_obj"): + warnings.warn( + "If you're using Pydantic v1, upgrade to Pydantic v2 and use temporalio.contrib.pydantic.pydantic_data_converter. " + "If you cannot upgrade, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for better v1 support." + ) + # We let JSON conversion errors be thrown to caller return temporalio.api.common.v1.Payload( metadata={"encoding": self._encoding.encode()}, data=json.dumps( @@ -1527,25 +1527,20 @@ def value_to_type( return hint(**field_values) # Pydantic model instance - # Pydantic users should use + # Pydantic users should use Pydantic v2 with # temporalio.contrib.pydantic.pydantic_data_converter, in which case a # pydantic model instance will have been handled by the custom_converters at # the start of this function. We retain the following for backwards - # compatibility with pydantic users who are not using contrib.pydantic, but - # this is deprecated. - for pydantic_attr in ["model_validate", "parse_obj"]: - pydantic_method = inspect.getattr_static(hint, pydantic_attr, None) - if isinstance(pydantic_method, classmethod) or isinstance( - pydantic_method, staticmethod - ): - if not isinstance(value, dict): - raise TypeError( - f"Cannot convert to {hint}, value is {type(value)} not dict" - ) - warnings.warn( - "It looks like you're using pydantic. Please use temporalio.contrib.pydantic.converter.pydantic_data_converter." + # compatibility with pydantic v1 users, but this is deprecated. + parse_obj_attr = inspect.getattr_static(hint, "parse_obj", None) + if isinstance(parse_obj_attr, classmethod) or isinstance( + parse_obj_attr, staticmethod + ): + if not isinstance(value, dict): + raise TypeError( + f"Cannot convert to {hint}, value is {type(value)} not dict" ) - return getattr(hint, pydantic_attr)(value) + return getattr(hint, "parse_obj")(value) # IntEnum if inspect.isclass(hint) and issubclass(hint, IntEnum): From 89758fd6c6d9ccc3840150f1818d5b2984ef78f3 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 19:22:51 -0500 Subject: [PATCH 75/96] README --- README.md | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index f4bf5f8e0..efe7aed9b 100644 --- a/README.md +++ b/README.md @@ -297,10 +297,10 @@ other_ns_client = Client(**config) #### Data Conversion Data converters are used to convert raw Temporal payloads to/from actual Python types. A custom data converter of type -`temporalio.converter.DataConverter` can be set via the `data_converter` client parameter. Data converters are a -combination of payload converters, payload codecs, and failure converters. Payload converters convert Python values -to/from serialized bytes. Payload codecs convert bytes to bytes (e.g. for compression or encryption). Failure converters -convert exceptions to/from serialized failures. +`temporalio.converter.DataConverter` can be set via the `data_converter` parameter of the `Client` constructor. 
Data +converters are a combination of payload converters, payload codecs, and failure converters. Payload converters convert +Python values to/from serialized bytes. Payload codecs convert bytes to bytes (e.g. for compression or encryption). +Failure converters convert exceptions to/from serialized failures. The default data converter supports converting multiple types including: @@ -314,21 +314,31 @@ The default data converter supports converting multiple types including: * [IntEnum, StrEnum](https://docs.python.org/3/library/enum.html) based enumerates * [UUID](https://docs.python.org/3/library/uuid.html) -This notably doesn't include any `date`, `time`, or `datetime` objects as they may not work across SDKs. +To use pydantic model instances, see [](#pydantic-support). -Users are strongly encouraged to use a single `dataclass` for parameter and return types so fields with defaults can be -easily added without breaking compatibility. +`datetime.date`, `datetime.time`, and `datetime.datetime` can only be used as fields of Pydantic models. + +Users are strongly encouraged to use a single `dataclass` or Pydantic model for parameter and return types, so that fields +with defaults can be easily added without breaking compatibility. + +Classes with generics may not have the generics properly resolved. The current implementation does not have generic +type resolution. Users should use concrete types. + +##### Pydantic Support + +To use Pydantic model instances, install Pydantic and set the Pydantic data converter when creating client instances: -To use pydantic model instances (or python objects containing pydantic model instances), use ```python from temporalio.contrib.pydantic import pydantic_data_converter client = Client(data_converter=pydantic_data_converter, ...) ``` + +Pydantic v1 is not supported by this data converter. If you are not yet able to upgrade from Pydantic v1, see +https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for limited v1 support. + Do not use pydantic's [strict mode](https://docs.pydantic.dev/latest/concepts/strict_mode/). -Classes with generics may not have the generics properly resolved. The current implementation does not have generic -type resolution. Users should use concrete types. ##### Custom Type Data Conversion @@ -1334,7 +1344,7 @@ async def check_past_histories(my_client: Client): OpenTelemetry support requires the optional `opentelemetry` dependencies which are part of the `opentelemetry` extra. When using `pip`, running - pip install temporalio[opentelemetry] + pip install 'temporalio[opentelemetry]' will install needed dependencies. Then the `temporalio.contrib.opentelemetry.TracingInterceptor` can be created and set as an interceptor on the `interceptors` argument of `Client.connect`. 
When set, spans will be created for all client From 24d489c7397d4880c7f459e0d0f63f50bd3cee87 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 20:03:36 -0500 Subject: [PATCH 76/96] Include pydantic extra --- poetry.lock | 6 +++++- pyproject.toml | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 62fa9f8fc..468f4db78 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,6 +7,7 @@ description = "Reusable constraint types to use with typing.Annotated" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"pydantic\"" files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -1083,6 +1084,7 @@ description = "Data validation using Python type hints" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"pydantic\"" files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -1104,6 +1106,7 @@ description = "Core functionality for Pydantic validation and serialization" optional = true python-versions = ">=3.8" groups = ["main"] +markers = "extra == \"pydantic\"" files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -1901,8 +1904,9 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] grpc = ["grpcio"] opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] +pydantic = ["pydantic"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "6e479ea624b39564fb33a40ce6ad8f9139a9a8849182251bf84942fe456e36b0" +content-hash = "8279e4840e0d8124cfec01cf11c5076f090583d33580bb4e39adcd1f168105df" diff --git a/pyproject.toml b/pyproject.toml index f4f0b1352..8c637bf79 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,8 +61,9 @@ twine = "^4.0.1" wheel = "^0.42.0" [tool.poetry.extras] -opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] grpc = ["grpcio"] +opentelemetry = ["opentelemetry-api", "opentelemetry-sdk"] +pydantic = ["pydantic"] [tool.poetry.group.dev.dependencies] ruff = "^0.5.0" From 7e7c6d4109b1f08422dce62fad1033a8e2ee9d9e Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 20:37:20 -0500 Subject: [PATCH 77/96] date instances are no longer proxied --- tests/worker/workflow_sandbox/test_runner.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/worker/workflow_sandbox/test_runner.py b/tests/worker/workflow_sandbox/test_runner.py index d367fed2c..14b2c94c3 100644 --- a/tests/worker/workflow_sandbox/test_runner.py +++ b/tests/worker/workflow_sandbox/test_runner.py @@ -14,7 +14,6 @@ import pytest -import temporalio.worker.workflow_sandbox._restrictions from temporalio import activity, workflow from temporalio.client import Client, WorkflowFailureError, WorkflowHandle from temporalio.exceptions import ApplicationError @@ -262,10 +261,6 @@ async def test_workflow_sandbox_restrictions(client: Client): class DateOperatorWorkflow: @workflow.run async def run(self) 
-> int: - assert ( - type(date(2010, 1, 20)) - == temporalio.worker.workflow_sandbox._restrictions._RestrictedProxy - ) return (date(2010, 1, 20) - date(2010, 1, 1)).days From 3de8f286befc4aa8b75bcc8e15e9dc56f16a0e43 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 21:47:24 -0500 Subject: [PATCH 78/96] Fix tests on Windows --- tests/worker/workflow_sandbox/test_restrictions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/worker/workflow_sandbox/test_restrictions.py b/tests/worker/workflow_sandbox/test_restrictions.py index 99ef49378..cf96d28d6 100644 --- a/tests/worker/workflow_sandbox/test_restrictions.py +++ b/tests/worker/workflow_sandbox/test_restrictions.py @@ -91,8 +91,9 @@ def test_restricted_proxy_dunder_methods(): assert isinstance(format(restricted_path, ""), str) restricted_path_obj = restricted_path("test/path") assert type(restricted_path_obj) is _RestrictedProxy - assert format(restricted_path_obj, "") == "test/path" - assert f"{restricted_path_obj}" == "test/path" + expected_path = str(pathlib.PurePath("test/path")) + assert format(restricted_path_obj, "") == expected_path + assert f"{restricted_path_obj}" == expected_path def test_workflow_sandbox_restricted_proxy(): From b2f62de0419f386f7bd1985268f8438d370554a7 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Mon, 10 Feb 2025 21:48:39 -0500 Subject: [PATCH 79/96] doctoc --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index efe7aed9b..0c7acf4ba 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,7 @@ informal introduction to the features and their implementation. - [Usage](#usage) - [Client](#client) - [Data Conversion](#data-conversion) + - [Pydantic Support](#pydantic-support) - [Custom Type Data Conversion](#custom-type-data-conversion) - [Workers](#workers) - [Workflows](#workflows) From 975a1fc2a7134ee7d421e0296bedf030e25102d8 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 11 Feb 2025 10:04:58 -0500 Subject: [PATCH 80/96] Restrict pydantic range to v2 in pyproject.toml --- poetry.lock | 2 +- pyproject.toml | 2 +- temporalio/worker/workflow_sandbox/_restrictions.py | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 468f4db78..6b11c570b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1909,4 +1909,4 @@ pydantic = ["pydantic"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "8279e4840e0d8124cfec01cf11c5076f090583d33580bb4e39adcd1f168105df" +content-hash = "fe88ba77a85c62862831e8286dddfc0530d1b7ad3c5c38be31f1508fd496c6e2" diff --git a/pyproject.toml b/pyproject.toml index 8c637bf79..bf2a7b2a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ grpcio = {version = "^1.48.2", optional = true} opentelemetry-api = { version = "^1.11.1", optional = true } opentelemetry-sdk = { version = "^1.11.1", optional = true } protobuf = ">=3.20" -pydantic = { version = ">=1.10.0,<3.0.0", optional = true } +pydantic = { version = "^2.0.0", optional = true } python = "^3.9" python-dateutil = { version = "^2.8.2", python = "<3.11" } types-protobuf = ">=3.20" diff --git a/temporalio/worker/workflow_sandbox/_restrictions.py b/temporalio/worker/workflow_sandbox/_restrictions.py index 62e5894ea..3796fd7aa 100644 --- a/temporalio/worker/workflow_sandbox/_restrictions.py +++ b/temporalio/worker/workflow_sandbox/_restrictions.py @@ -447,6 +447,7 @@ def with_child_unrestricted(self, *child_path: str) -> SandboxMatcher: # Very general modules needed by many things 
including pytest's # assertion rewriter "typing", + # Required for Pydantic TypedDict fields. "typing_extensions", # Required due to https://github.com/protocolbuffers/protobuf/issues/10143 # for older versions. This unfortunately means that on those versions, From 17c06ec826f8cf51614e975aaa75d2bc6a37dd0b Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 11 Feb 2025 10:29:40 -0500 Subject: [PATCH 81/96] Fix warning message --- temporalio/converter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index 2a4da1e4b..cd90c3df1 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -563,8 +563,8 @@ def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: # Check for Pydantic v1 if hasattr(value, "parse_obj"): warnings.warn( - "If you're using Pydantic v1, upgrade to Pydantic v2 and use temporalio.contrib.pydantic.pydantic_data_converter. " - "If you cannot upgrade, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for better v1 support." + "If you're using Pydantic v2, use temporalio.contrib.pydantic.pydantic_data_converter. " + "If you're using Pydantic v1 and cannot upgrade, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for better v1 support." ) # We let JSON conversion errors be thrown to caller return temporalio.api.common.v1.Payload( From d45bbcd65b81855de725bc179c1d47b1d596c93b Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 11 Feb 2025 13:58:07 -0500 Subject: [PATCH 82/96] Failing test for strict mode --- tests/contrib/pydantic/models.py | 4 ++++ tests/contrib/pydantic/test_pydantic.py | 30 +++++++++++++++++++++++++ tests/contrib/pydantic/workflows.py | 22 ++++++++++++++++++ 3 files changed, 56 insertions(+) diff --git a/tests/contrib/pydantic/models.py b/tests/contrib/pydantic/models.py index 85c4d8c65..bffda16a9 100644 --- a/tests/contrib/pydantic/models.py +++ b/tests/contrib/pydantic/models.py @@ -342,3 +342,7 @@ def make_dataclass_objects() -> List[MyDataClass]: ComplexCustomType = Tuple[List[MyDataClass], List[PydanticModels]] ComplexCustomUnionType = List[Union[MyDataClass, PydanticModels]] + + +class PydanticModelWithStrictField(BaseModel): + strict_field: datetime = Field(strict=True) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index f9a5c0127..107066a6b 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -1,5 +1,6 @@ import dataclasses import uuid +from datetime import datetime from pydantic import BaseModel @@ -7,6 +8,7 @@ from temporalio.contrib.pydantic import pydantic_data_converter from temporalio.worker import Worker from tests.contrib.pydantic.models import ( + PydanticModelWithStrictField, make_dataclass_objects, make_list_of_pydantic_objects, ) @@ -17,7 +19,9 @@ DatetimeUsageWorkflow, InstantiateModelsWorkflow, PydanticModelUsageWorkflow, + PydanticModelWithStrictFieldWorkflow, RoundTripObjectsWorkflow, + _test_pydantic_model_with_strict_field, clone_objects, pydantic_models_activity, ) @@ -206,3 +210,29 @@ async def test_datetime_usage_in_workflow(client: Client): id=str(uuid.uuid4()), task_queue=task_queue_name, ) + + +def test_pydantic_model_with_strict_field_outside_sandbox(): + _test_pydantic_model_with_strict_field( + PydanticModelWithStrictField(strict_field=datetime(2025, 1, 2, 3, 4, 5)) + ) + + +async def test_pydantic_model_with_strict_field_inside_sandbox(client: 
Client): + client_config = client.config() + client_config["data_converter"] = pydantic_data_converter + client = Client(**client_config) + tq = str(uuid.uuid4()) + async with Worker( + client, + workflows=[PydanticModelWithStrictFieldWorkflow], + task_queue=tq, + ): + orig = PydanticModelWithStrictField(strict_field=datetime(2025, 1, 2, 3, 4, 5)) + result = await client.execute_workflow( + PydanticModelWithStrictFieldWorkflow.run, + orig, + id=str(uuid.uuid4()), + task_queue=tq, + ) + assert result == orig diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index 1b1bc359a..be5769a0f 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -13,6 +13,7 @@ ComplexCustomType, ComplexCustomUnionType, PydanticModels, + PydanticModelWithStrictField, make_list_of_pydantic_objects, ) @@ -110,3 +111,24 @@ async def run(self) -> None: dt = workflow.now() assert isinstance(dt, datetime) assert issubclass(dt.__class__, datetime) + + +def _test_pydantic_model_with_strict_field( + obj: PydanticModelWithStrictField, +): + roundtripped = PydanticModelWithStrictField.model_validate(obj.model_dump()) + assert roundtripped == obj + roundtripped2 = PydanticModelWithStrictField.model_validate_json( + obj.model_dump_json() + ) + assert roundtripped2 == obj + return roundtripped + + +@workflow.defn +class PydanticModelWithStrictFieldWorkflow: + @workflow.run + async def run( + self, obj: PydanticModelWithStrictField + ) -> PydanticModelWithStrictField: + return _test_pydantic_model_with_strict_field(obj) From 5cf962f2781750008ffe29a5ae4444adbdaf3c09 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Tue, 11 Feb 2025 23:33:10 -0500 Subject: [PATCH 83/96] Hand over entirely to pydantic --- README.md | 14 ++++-- temporalio/contrib/pydantic.py | 85 ++++++++++++++++++++++------------ 2 files changed, 65 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index 0c7acf4ba..119b64512 100644 --- a/README.md +++ b/README.md @@ -317,10 +317,11 @@ The default data converter supports converting multiple types including: To use pydantic model instances, see [](#pydantic-support). -`datetime.date`, `datetime.time`, and `datetime.datetime` can only be used as fields of Pydantic models. +`datetime.date`, `datetime.time`, and `datetime.datetime` can only be used with the Pydantic data converter. -Users are strongly encouraged to use a single `dataclass` or Pydantic model for parameter and return types, so that fields -with defaults can be easily added without breaking compatibility. +Although workflows, updates, signals, and queries can all be defined with multiple input parameters, users are strongly +encouraged to use a single `dataclass` or Pydantic model parameter, so that fields with defaults can be easily added +without breaking compatibility. Similar advice applies to return values. Classes with generics may not have the generics properly resolved. The current implementation does not have generic type resolution. Users should use concrete types. @@ -335,11 +336,14 @@ from temporalio.contrib.pydantic import pydantic_data_converter client = Client(data_converter=pydantic_data_converter, ...) ``` +This data converter supports conversion of all types supported by Pydantic to and from JSON. + +In addition to Pydantic models, these include all `json.dump`-able types, various non-`json.dump`-able standard library +types such as dataclasses, types from the datetime module, sets, UUID, etc, and custom types composed of any of these. 
+ Pydantic v1 is not supported by this data converter. If you are not yet able to upgrade from Pydantic v1, see https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for limited v1 support. -Do not use pydantic's [strict mode](https://docs.pydantic.dev/latest/concepts/strict_mode/). - ##### Custom Type Data Conversion diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 9a906552b..2eb0b158c 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -13,63 +13,83 @@ Pydantic v1 is not supported. """ -import inspect -from typing import Any, Type +from typing import Any, Optional, Type -import pydantic -from pydantic_core import to_jsonable_python +from pydantic import TypeAdapter, ValidationError +from pydantic_core import to_json +import temporalio.api.common.v1 from temporalio.converter import ( - AdvancedJSONEncoder, CompositePayloadConverter, DataConverter, DefaultPayloadConverter, + EncodingPayloadConverter, JSONPlainPayloadConverter, - JSONTypeConverter, ) # Note that in addition to the implementation in this module, _RestrictedProxy # implements __get_pydantic_core_schema__ so that pydantic unwraps proxied types. -class PydanticModelTypeConverter(JSONTypeConverter): - """Type converter for pydantic model instances.""" +class PydanticJSONPlainPayloadConverter(EncodingPayloadConverter): + """Pydantic JSON payload converter. - def to_typed_value(self, hint: Type, value: Any) -> Any: - """Convert value to pydantic model instance of the specified type""" - if not inspect.isclass(hint) or not issubclass(hint, pydantic.BaseModel): - return JSONTypeConverter.Unhandled - return hint.model_validate(value) + Supports conversion of all types supported by Pydantic to and from JSON. + In addition to Pydantic models, these include all `json.dump`-able types, + various non-`json.dump`-able standard library types such as dataclasses, + types from the datetime module, sets, UUID, etc, and custom types composed + of any of these. -class PydanticJSONEncoder(AdvancedJSONEncoder): - """JSON encoder for python objects containing pydantic model instances.""" + See https://docs.pydantic.dev/latest/api/standard_library_types/ + """ + + @property + def encoding(self) -> str: + """See base class.""" + return "json/plain" + + def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class. - def default(self, o: Any) -> Any: - """Convert object to jsonable python. + Uses :py:func:`pydantic_core.to_json` to serialize ``value` to JSON. - See :py:meth:`json.JSONEncoder.default`. + See + https://docs.pydantic.dev/latest/api/pydantic_core/#pydantic_core.to_json. """ - if isinstance(o, pydantic.BaseModel): - return to_jsonable_python(o) - return super().default(o) + return temporalio.api.common.v1.Payload( + metadata={"encoding": self.encoding.encode()}, data=to_json(value) + ) + + def from_payload( + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, + ) -> Any: + """See base class. + + Uses :py:func:`pydantic.TypeAdapter.validate_json` to construct an + instance of the type specified by ``type_hint`` from the JSON payload. + + See + https://docs.pydantic.dev/latest/api/type_adapter/#pydantic.type_adapter.TypeAdapter.validate_json. 
+ """ + try: + return TypeAdapter(type_hint).validate_json(payload.data) + except ValidationError as err: + raise RuntimeError("Failed parsing") from err class PydanticPayloadConverter(CompositePayloadConverter): """Payload converter for payloads containing pydantic model instances. JSON conversion is replaced with a converter that uses - :py:class:`PydanticJSONEncoder` to convert the python object to JSON, and - :py:class:`PydanticModelTypeConverter` to convert raw python values to - pydantic model instances. + :py:class:`PydanticJSONPlainPayloadConverter`. """ def __init__(self) -> None: """Initialize object""" - json_payload_converter = JSONPlainPayloadConverter( - encoder=PydanticJSONEncoder, - custom_type_converters=[PydanticModelTypeConverter()], - ) + json_payload_converter = PydanticJSONPlainPayloadConverter() super().__init__( *( c @@ -83,7 +103,14 @@ def __init__(self) -> None: pydantic_data_converter = DataConverter( payload_converter_class=PydanticPayloadConverter ) -"""Data converter for payloads containing pydantic model instances. +"""Pydantic data converter. + +Supports conversion of all types supported by Pydantic to and from JSON. + +In addition to Pydantic models, these include all `json.dump`-able types, +various non-`json.dump`-able standard library types such as dataclasses, +types from the datetime module, sets, UUID, etc, and custom types composed +of any of these. To use, pass as the ``data_converter`` argument of :py:class:`temporalio.client.Client` """ From b6d3336f0fa6255676e9dbbb701dda93f0fed712 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Wed, 12 Feb 2025 16:48:09 -0500 Subject: [PATCH 84/96] Test round-trip misc top-level objects --- tests/contrib/pydantic/activities.py | 11 ++++++- tests/contrib/pydantic/test_pydantic.py | 39 ++++++++++++++++++++----- tests/contrib/pydantic/workflows.py | 25 ++++++++++++---- 3 files changed, 62 insertions(+), 13 deletions(-) diff --git a/tests/contrib/pydantic/activities.py b/tests/contrib/pydantic/activities.py index 780203351..b6709d24d 100644 --- a/tests/contrib/pydantic/activities.py +++ b/tests/contrib/pydantic/activities.py @@ -1,11 +1,20 @@ +from datetime import datetime from typing import List +from uuid import UUID from temporalio import activity from tests.contrib.pydantic.models import PydanticModels @activity.defn -async def pydantic_models_activity( +async def pydantic_objects_activity( models: List[PydanticModels], ) -> List[PydanticModels]: return models + + +@activity.defn +async def misc_objects_activity( + models: tuple[datetime, UUID], +) -> tuple[datetime, UUID]: + return models diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index 107066a6b..069b61e6c 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -20,10 +20,12 @@ InstantiateModelsWorkflow, PydanticModelUsageWorkflow, PydanticModelWithStrictFieldWorkflow, - RoundTripObjectsWorkflow, + RoundTripMiscObjectsWorkflow, + RoundTripPydanticObjectsWorkflow, _test_pydantic_model_with_strict_field, clone_objects, - pydantic_models_activity, + misc_objects_activity, + pydantic_objects_activity, ) @@ -60,11 +62,11 @@ async def test_round_trip_pydantic_objects(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[RoundTripObjectsWorkflow], - activities=[pydantic_models_activity], + workflows=[RoundTripPydanticObjectsWorkflow], + activities=[pydantic_objects_activity], ): returned_objects = await client.execute_workflow( - 
RoundTripObjectsWorkflow.run, + RoundTripPydanticObjectsWorkflow.run, orig_objects, id=str(uuid.uuid4()), task_queue=task_queue_name, @@ -74,6 +76,29 @@ async def test_round_trip_pydantic_objects(client: Client): o._check_instance() +async def test_round_trip_misc_objects(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + orig_objects = (datetime(2025, 1, 2, 3, 4, 5), uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[RoundTripMiscObjectsWorkflow], + activities=[misc_objects_activity], + ): + returned_objects = await client.execute_workflow( + RoundTripMiscObjectsWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert returned_objects == orig_objects + + async def test_clone_objects_outside_sandbox(): clone_objects(make_list_of_pydantic_objects()) @@ -115,7 +140,7 @@ async def test_complex_custom_type(client: Client): client, task_queue=task_queue_name, workflows=[ComplexCustomTypeWorkflow], - activities=[pydantic_models_activity], + activities=[pydantic_objects_activity], ): ( returned_dataclass_objects, @@ -149,7 +174,7 @@ async def test_complex_custom_union_type(client: Client): client, task_queue=task_queue_name, workflows=[ComplexCustomUnionTypeWorkflow], - activities=[pydantic_models_activity], + activities=[pydantic_objects_activity], ): returned_objects = await client.execute_workflow( ComplexCustomUnionTypeWorkflow.run, diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index be5769a0f..82bd685bf 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -1,13 +1,17 @@ import dataclasses from datetime import datetime, timedelta from typing import List +from uuid import UUID from pydantic import BaseModel, create_model from temporalio import workflow with workflow.unsafe.imports_passed_through(): - from tests.contrib.pydantic.activities import pydantic_models_activity + from tests.contrib.pydantic.activities import ( + misc_objects_activity, + pydantic_objects_activity, + ) from tests.contrib.pydantic.models import ( ComplexCustomType, @@ -39,11 +43,22 @@ async def run(self) -> None: @workflow.defn -class RoundTripObjectsWorkflow: +class RoundTripPydanticObjectsWorkflow: @workflow.run async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: return await workflow.execute_activity( - pydantic_models_activity, + pydantic_objects_activity, + objects, + start_to_close_timeout=timedelta(minutes=1), + ) + + +@workflow.defn +class RoundTripMiscObjectsWorkflow: + @workflow.run + async def run(self, objects: tuple[datetime, UUID]) -> tuple[datetime, UUID]: + return await workflow.execute_activity( + misc_objects_activity, objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -73,7 +88,7 @@ async def run( else: raise TypeError(f"Unexpected type: {type(o)}") pydantic_objects = await workflow.execute_activity( - pydantic_models_activity, + pydantic_objects_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) @@ -89,7 +104,7 @@ async def run( ) -> ComplexCustomType: data_classes, pydantic_objects = input pydantic_objects = await workflow.execute_activity( - pydantic_models_activity, + pydantic_objects_activity, pydantic_objects, start_to_close_timeout=timedelta(minutes=1), ) From 051b00379022b04bb990a0b7d520c2b7897533fe Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Wed, 12 
Feb 2025 19:04:26 -0500 Subject: [PATCH 85/96] Define special types model in the sandbox --- tests/contrib/pydantic/models.py | 70 ++++++++++++++++++++++++++++-- tests/contrib/pydantic/models_2.py | 68 ----------------------------- 2 files changed, 67 insertions(+), 71 deletions(-) diff --git a/tests/contrib/pydantic/models.py b/tests/contrib/pydantic/models.py index bffda16a9..b5c5de756 100644 --- a/tests/contrib/pydantic/models.py +++ b/tests/contrib/pydantic/models.py @@ -1,5 +1,7 @@ import dataclasses -from datetime import date, datetime, timedelta +import uuid +from datetime import date, datetime, time, timedelta, timezone +from ipaddress import IPv4Address from pathlib import Path from typing import ( Annotated, @@ -22,10 +24,8 @@ with workflow.unsafe.imports_passed_through(): from tests.contrib.pydantic.models_2 import ( ComplexTypesModel, - SpecialTypesModel, StandardTypesModel, make_complex_types_object, - make_special_types_object, make_standard_types_object, ) @@ -33,6 +33,70 @@ ShortSequence = Annotated[SequenceType, Len(max_length=2)] +class SpecialTypesModel(BaseModel): + datetime_field: datetime + datetime_field_int: datetime + datetime_field_float: datetime + datetime_field_str_formatted: datetime + datetime_field_str_int: datetime + datetime_field_date: datetime + + time_field: time + time_field_str: time + + date_field: date + timedelta_field: timedelta + path_field: Path + uuid_field: uuid.UUID + ip_field: IPv4Address + + def _check_instance(self) -> None: + dt = datetime(2000, 1, 2, 3, 4, 5) + dtz = datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) + assert isinstance(self.datetime_field, datetime) + assert isinstance(self.datetime_field_int, datetime) + assert isinstance(self.datetime_field_float, datetime) + assert isinstance(self.datetime_field_str_formatted, datetime) + assert isinstance(self.datetime_field_str_int, datetime) + assert isinstance(self.datetime_field_date, datetime) + assert isinstance(self.timedelta_field, timedelta) + assert isinstance(self.path_field, Path) + assert isinstance(self.uuid_field, uuid.UUID) + assert isinstance(self.ip_field, IPv4Address) + assert self.datetime_field == dt + assert self.datetime_field_int == dtz + assert self.datetime_field_float == dtz + assert self.datetime_field_str_formatted == dtz + assert self.datetime_field_str_int == dtz + assert self.datetime_field_date == datetime(2000, 1, 2) + assert self.time_field == time(3, 4, 5) + assert self.time_field_str == time(3, 4, 5, tzinfo=timezone.utc) + assert self.date_field == date(2000, 1, 2) + assert self.timedelta_field == timedelta(days=1, hours=2) + assert self.path_field == Path("test/path") + assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") + assert self.ip_field == IPv4Address("127.0.0.1") + + +def make_special_types_object() -> SpecialTypesModel: + return SpecialTypesModel( + datetime_field=datetime(2000, 1, 2, 3, 4, 5), + # 946800245 + datetime_field_int=946782245, # type: ignore + datetime_field_float=946782245.0, # type: ignore + datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore + datetime_field_str_int="946782245", # type: ignore + datetime_field_date=datetime(2000, 1, 2), + time_field=time(3, 4, 5), + time_field_str="03:04:05Z", # type: ignore + date_field=date(2000, 1, 2), + timedelta_field=timedelta(days=1, hours=2), + path_field=Path("test/path"), + uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), + ip_field=IPv4Address("127.0.0.1"), + ) + + class ChildModel(BaseModel): name: str value: int diff --git 
a/tests/contrib/pydantic/models_2.py b/tests/contrib/pydantic/models_2.py index 51f213d8a..52d36824b 100644 --- a/tests/contrib/pydantic/models_2.py +++ b/tests/contrib/pydantic/models_2.py @@ -2,11 +2,7 @@ import decimal import fractions import re -import uuid -from datetime import date, datetime, time, timedelta, timezone from enum import Enum, IntEnum -from ipaddress import IPv4Address -from pathlib import Path from typing import ( Any, Dict, @@ -246,67 +242,3 @@ def make_complex_types_object() -> ComplexTypesModel: optional_field="present", named_tuple_field=Point(x=1, y=2), ) - - -class SpecialTypesModel(BaseModel): - datetime_field: datetime - datetime_field_int: datetime - datetime_field_float: datetime - datetime_field_str_formatted: datetime - datetime_field_str_int: datetime - datetime_field_date: datetime - - time_field: time - time_field_str: time - - date_field: date - timedelta_field: timedelta - path_field: Path - uuid_field: uuid.UUID - ip_field: IPv4Address - - def _check_instance(self) -> None: - dt = datetime(2000, 1, 2, 3, 4, 5) - dtz = datetime(2000, 1, 2, 3, 4, 5, tzinfo=timezone.utc) - assert isinstance(self.datetime_field, datetime) - assert isinstance(self.datetime_field_int, datetime) - assert isinstance(self.datetime_field_float, datetime) - assert isinstance(self.datetime_field_str_formatted, datetime) - assert isinstance(self.datetime_field_str_int, datetime) - assert isinstance(self.datetime_field_date, datetime) - assert isinstance(self.timedelta_field, timedelta) - assert isinstance(self.path_field, Path) - assert isinstance(self.uuid_field, uuid.UUID) - assert isinstance(self.ip_field, IPv4Address) - assert self.datetime_field == dt - assert self.datetime_field_int == dtz - assert self.datetime_field_float == dtz - assert self.datetime_field_str_formatted == dtz - assert self.datetime_field_str_int == dtz - assert self.datetime_field_date == datetime(2000, 1, 2) - assert self.time_field == time(3, 4, 5) - assert self.time_field_str == time(3, 4, 5, tzinfo=timezone.utc) - assert self.date_field == date(2000, 1, 2) - assert self.timedelta_field == timedelta(days=1, hours=2) - assert self.path_field == Path("test/path") - assert self.uuid_field == uuid.UUID("12345678-1234-5678-1234-567812345678") - assert self.ip_field == IPv4Address("127.0.0.1") - - -def make_special_types_object() -> SpecialTypesModel: - return SpecialTypesModel( - datetime_field=datetime(2000, 1, 2, 3, 4, 5), - # 946800245 - datetime_field_int=946782245, # type: ignore - datetime_field_float=946782245.0, # type: ignore - datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore - datetime_field_str_int="946782245", # type: ignore - datetime_field_date=datetime(2000, 1, 2), - time_field=time(3, 4, 5), - time_field_str="03:04:05Z", # type: ignore - date_field=date(2000, 1, 2), - timedelta_field=timedelta(days=1, hours=2), - path_field=Path("test/path"), - uuid_field=uuid.UUID("12345678-1234-5678-1234-567812345678"), - ip_field=IPv4Address("127.0.0.1"), - ) From b61d06e3b219641bce8fc0e626cdc324fc38f566 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Wed, 12 Feb 2025 19:11:17 -0500 Subject: [PATCH 86/96] Test strict versions of some models --- tests/contrib/pydantic/models.py | 17 ++++++++++++++++- tests/contrib/pydantic/models_2.py | 9 +++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/tests/contrib/pydantic/models.py b/tests/contrib/pydantic/models.py index b5c5de756..808447726 100644 --- a/tests/contrib/pydantic/models.py +++ b/tests/contrib/pydantic/models.py 
@@ -13,10 +13,11 @@ Tuple, TypeVar, Union, + cast, ) from annotated_types import Len -from pydantic import BaseModel, Field, WithJsonSchema +from pydantic import BaseModel, ConfigDict, Field, WithJsonSchema from temporalio import workflow @@ -25,8 +26,10 @@ from tests.contrib.pydantic.models_2 import ( ComplexTypesModel, StandardTypesModel, + StrictStandardTypesModel, make_complex_types_object, make_standard_types_object, + make_strict_standard_types_object, ) SequenceType = TypeVar("SequenceType", bound=Sequence[Any]) @@ -97,6 +100,14 @@ def make_special_types_object() -> SpecialTypesModel: ) +class StrictSpecialTypesModel(SpecialTypesModel): + model_config = ConfigDict(strict=True) + + +def make_strict_special_types_object() -> StrictSpecialTypesModel: + return cast(StrictSpecialTypesModel, make_special_types_object()) + + class ChildModel(BaseModel): name: str value: int @@ -362,8 +373,10 @@ def _assert_timedelta_validity(td: timedelta): PydanticModels = Union[ StandardTypesModel, + StrictStandardTypesModel, ComplexTypesModel, SpecialTypesModel, + StrictSpecialTypesModel, ParentModel, FieldFeaturesModel, AnnotatedFieldsModel, @@ -378,8 +391,10 @@ def _assert_timedelta_validity(td: timedelta): def make_list_of_pydantic_objects() -> List[PydanticModels]: objects = [ make_standard_types_object(), + make_strict_standard_types_object(), make_complex_types_object(), make_special_types_object(), + make_strict_special_types_object(), make_nested_object(), make_field_features_object(), make_annotated_fields_object(), diff --git a/tests/contrib/pydantic/models_2.py b/tests/contrib/pydantic/models_2.py index 52d36824b..f04447fd0 100644 --- a/tests/contrib/pydantic/models_2.py +++ b/tests/contrib/pydantic/models_2.py @@ -15,6 +15,7 @@ Set, Tuple, Union, + cast, ) from pydantic import BaseModel @@ -202,6 +203,14 @@ def make_standard_types_object() -> StandardTypesModel: ) +class StrictStandardTypesModel(StandardTypesModel, strict=True): + pass + + +def make_strict_standard_types_object() -> StrictStandardTypesModel: + return cast(StrictStandardTypesModel, make_standard_types_object()) + + class Point(NamedTuple): x: int y: int From 7c7529c636c8c6c2acf6ed2d94ed092a6e9a91c4 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 08:38:26 -0500 Subject: [PATCH 87/96] Fix link --- temporalio/converter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/temporalio/converter.py b/temporalio/converter.py index cd90c3df1..e52a344b8 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -564,7 +564,7 @@ def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: if hasattr(value, "parse_obj"): warnings.warn( "If you're using Pydantic v2, use temporalio.contrib.pydantic.pydantic_data_converter. " - "If you're using Pydantic v1 and cannot upgrade, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter/v1 for better v1 support." + "If you're using Pydantic v1 and cannot upgrade, refer to https://github.com/temporalio/samples-python/tree/main/pydantic_converter_v1 for better v1 support." 
) # We let JSON conversion errors be thrown to caller return temporalio.api.common.v1.Payload( From 23c88e89f424c50ec9baf421dec08bad70281f6a Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 09:01:04 -0500 Subject: [PATCH 88/96] Add more misc standard lib types to test --- tests/contrib/pydantic/activities.py | 20 ++++++++++++++++++-- tests/contrib/pydantic/test_pydantic.py | 10 +++++++++- tests/contrib/pydantic/workflows.py | 21 ++++++++++++++++++++- 3 files changed, 47 insertions(+), 4 deletions(-) diff --git a/tests/contrib/pydantic/activities.py b/tests/contrib/pydantic/activities.py index b6709d24d..ad1b86507 100644 --- a/tests/contrib/pydantic/activities.py +++ b/tests/contrib/pydantic/activities.py @@ -15,6 +15,22 @@ async def pydantic_objects_activity( @activity.defn async def misc_objects_activity( - models: tuple[datetime, UUID], -) -> tuple[datetime, UUID]: + models: tuple[ + int, + str, + dict[str, float], + list[dict[str, float]], + tuple[dict[str, float]], + datetime, + UUID, + ], +) -> tuple[ + int, + str, + dict[str, float], + list[dict[str, float]], + tuple[dict[str, float]], + datetime, + UUID, +]: return models diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index 069b61e6c..f9278cbbc 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -82,7 +82,15 @@ async def test_round_trip_misc_objects(client: Client): client = Client(**new_config) task_queue_name = str(uuid.uuid4()) - orig_objects = (datetime(2025, 1, 2, 3, 4, 5), uuid.uuid4()) + orig_objects = ( + 7, + "7", + {"7": 7.0}, + [{"7": 7.0}], + ({"7": 7.0},), + datetime(2025, 1, 2, 3, 4, 5), + uuid.uuid4(), + ) async with Worker( client, diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index 82bd685bf..af519b079 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -56,7 +56,26 @@ async def run(self, objects: List[PydanticModels]) -> List[PydanticModels]: @workflow.defn class RoundTripMiscObjectsWorkflow: @workflow.run - async def run(self, objects: tuple[datetime, UUID]) -> tuple[datetime, UUID]: + async def run( + self, + objects: tuple[ + int, + str, + dict[str, float], + list[dict[str, float]], + tuple[dict[str, float]], + datetime, + UUID, + ], + ) -> tuple[ + int, + str, + dict[str, float], + list[dict[str, float]], + tuple[dict[str, float]], + datetime, + UUID, + ]: return await workflow.execute_activity( misc_objects_activity, objects, From b3c7d7353e1977982a64069d20f8dccd1648c83a Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 12:33:37 -0500 Subject: [PATCH 89/96] Failing test --- tests/contrib/pydantic/test_pydantic.py | 47 +++++++++++++++++++++---- tests/contrib/pydantic/workflows.py | 7 ++++ 2 files changed, 47 insertions(+), 7 deletions(-) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index f9278cbbc..af8a08cef 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -2,12 +2,14 @@ import uuid from datetime import datetime +import pytest from pydantic import BaseModel from temporalio.client import Client from temporalio.contrib.pydantic import pydantic_data_converter from temporalio.worker import Worker from tests.contrib.pydantic.models import ( + PydanticModels, PydanticModelWithStrictField, make_dataclass_objects, make_list_of_pydantic_objects, @@ -18,6 +20,7 @@ ComplexCustomUnionTypeWorkflow, 
DatetimeUsageWorkflow, InstantiateModelsWorkflow, + NoTypeAnnotationsWorkflow, PydanticModelUsageWorkflow, PydanticModelWithStrictFieldWorkflow, RoundTripMiscObjectsWorkflow, @@ -51,7 +54,8 @@ async def test_instantiation_inside_sandbox(client: Client): ) -async def test_round_trip_pydantic_objects(client: Client): +@pytest.mark.parametrize("typed", [True, False]) +async def test_round_trip_pydantic_objects(client: Client, typed: bool): new_config = client.config() new_config["data_converter"] = pydantic_data_converter client = Client(**new_config) @@ -65,12 +69,22 @@ async def test_round_trip_pydantic_objects(client: Client): workflows=[RoundTripPydanticObjectsWorkflow], activities=[pydantic_objects_activity], ): - returned_objects = await client.execute_workflow( - RoundTripPydanticObjectsWorkflow.run, - orig_objects, - id=str(uuid.uuid4()), - task_queue=task_queue_name, - ) + if typed: + returned_objects = await client.execute_workflow( + RoundTripPydanticObjectsWorkflow.run, + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + else: + returned_objects = await client.execute_workflow( + "RoundTripPydanticObjectsWorkflow", + orig_objects, + id=str(uuid.uuid4()), + task_queue=task_queue_name, + result_type=list[PydanticModels], + ) + assert returned_objects == orig_objects for o in returned_objects: o._check_instance() @@ -269,3 +283,22 @@ async def test_pydantic_model_with_strict_field_inside_sandbox(client: Client): task_queue=tq, ) assert result == orig + + +async def test_no_type_annotations(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + async with Worker( + client, + task_queue=task_queue_name, + workflows=[NoTypeAnnotationsWorkflow], + ): + result = await client.execute_workflow( + "NoTypeAnnotationsWorkflow", + (7,), + id=str(uuid.uuid4()), + task_queue=task_queue_name, + ) + assert result == [7] diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index af519b079..a4d656b20 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -166,3 +166,10 @@ async def run( self, obj: PydanticModelWithStrictField ) -> PydanticModelWithStrictField: return _test_pydantic_model_with_strict_field(obj) + + +@workflow.defn +class NoTypeAnnotationsWorkflow: + @workflow.run + async def run(self, arg): + return arg From 0e13cfaf33fb938432e26e3895a6fccfedc1fcd3 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 09:15:40 -0500 Subject: [PATCH 90/96] Handle absence of type hint --- temporalio/contrib/pydantic.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 2eb0b158c..2bf772511 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -74,8 +74,9 @@ def from_payload( See https://docs.pydantic.dev/latest/api/type_adapter/#pydantic.type_adapter.TypeAdapter.validate_json. 
""" + _type_hint = type_hint if type_hint is not None else Any try: - return TypeAdapter(type_hint).validate_json(payload.data) + return TypeAdapter(_type_hint).validate_json(payload.data) except ValidationError as err: raise RuntimeError("Failed parsing") from err From 512bd7b4fe9be8c1344685e9299905e217555cea Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 13:42:23 -0500 Subject: [PATCH 91/96] Allow pydantic ValidationError to be thrown out --- temporalio/contrib/pydantic.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index 2bf772511..d2c09d5ea 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -15,7 +15,7 @@ from typing import Any, Optional, Type -from pydantic import TypeAdapter, ValidationError +from pydantic import TypeAdapter from pydantic_core import to_json import temporalio.api.common.v1 @@ -75,10 +75,7 @@ def from_payload( https://docs.pydantic.dev/latest/api/type_adapter/#pydantic.type_adapter.TypeAdapter.validate_json. """ _type_hint = type_hint if type_hint is not None else Any - try: - return TypeAdapter(_type_hint).validate_json(payload.data) - except ValidationError as err: - raise RuntimeError("Failed parsing") from err + return TypeAdapter(_type_hint).validate_json(payload.data) class PydanticPayloadConverter(CompositePayloadConverter): From e91c90d8fb3bfe037249635aac02b5da35019acb Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 14:05:36 -0500 Subject: [PATCH 92/96] Test ValidationError --- tests/contrib/pydantic/test_pydantic.py | 23 +++++++++++++++++++++++ tests/contrib/pydantic/workflows.py | 7 +++++++ 2 files changed, 30 insertions(+) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index af8a08cef..ebc50af46 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -2,6 +2,7 @@ import uuid from datetime import datetime +import pydantic import pytest from pydantic import BaseModel @@ -25,6 +26,7 @@ PydanticModelWithStrictFieldWorkflow, RoundTripMiscObjectsWorkflow, RoundTripPydanticObjectsWorkflow, + ValidationErrorWorkflow, _test_pydantic_model_with_strict_field, clone_objects, misc_objects_activity, @@ -302,3 +304,24 @@ async def test_no_type_annotations(client: Client): task_queue=task_queue_name, ) assert result == [7] + + +async def test_validation_error(client: Client): + new_config = client.config() + new_config["data_converter"] = pydantic_data_converter + client = Client(**new_config) + task_queue_name = str(uuid.uuid4()) + + async with Worker( + client, + task_queue=task_queue_name, + workflows=[ValidationErrorWorkflow], + ): + with pytest.raises(pydantic.ValidationError): + await client.execute_workflow( + "ValidationErrorWorkflow", + "not-an-int", + id=str(uuid.uuid4()), + task_queue=task_queue_name, + result_type=tuple[int], + ) diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index a4d656b20..b52584769 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -173,3 +173,10 @@ class NoTypeAnnotationsWorkflow: @workflow.run async def run(self, arg): return arg + + +@workflow.defn +class ValidationErrorWorkflow: + @workflow.run + async def run(self, arg): + return arg From f5ee416d2cebf13db25b8f3ab8a01bed6f5c78fb Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 14:21:23 -0500 Subject: [PATCH 93/96] Fixup --- 
tests/contrib/pydantic/test_pydantic.py | 5 ++--- tests/contrib/pydantic/workflows.py | 7 ------- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/tests/contrib/pydantic/test_pydantic.py b/tests/contrib/pydantic/test_pydantic.py index ebc50af46..26764b40f 100644 --- a/tests/contrib/pydantic/test_pydantic.py +++ b/tests/contrib/pydantic/test_pydantic.py @@ -26,7 +26,6 @@ PydanticModelWithStrictFieldWorkflow, RoundTripMiscObjectsWorkflow, RoundTripPydanticObjectsWorkflow, - ValidationErrorWorkflow, _test_pydantic_model_with_strict_field, clone_objects, misc_objects_activity, @@ -315,11 +314,11 @@ async def test_validation_error(client: Client): async with Worker( client, task_queue=task_queue_name, - workflows=[ValidationErrorWorkflow], + workflows=[NoTypeAnnotationsWorkflow], ): with pytest.raises(pydantic.ValidationError): await client.execute_workflow( - "ValidationErrorWorkflow", + "NoTypeAnnotationsWorkflow", "not-an-int", id=str(uuid.uuid4()), task_queue=task_queue_name, diff --git a/tests/contrib/pydantic/workflows.py b/tests/contrib/pydantic/workflows.py index b52584769..a4d656b20 100644 --- a/tests/contrib/pydantic/workflows.py +++ b/tests/contrib/pydantic/workflows.py @@ -173,10 +173,3 @@ class NoTypeAnnotationsWorkflow: @workflow.run async def run(self, arg): return arg - - -@workflow.defn -class ValidationErrorWorkflow: - @workflow.run - async def run(self, arg): - return arg From 616c78f466bdf24679cc75d58ea1f3ff5913bc8f Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 15:41:39 -0500 Subject: [PATCH 94/96] Cleanup --- tests/contrib/pydantic/models.py | 1 - tests/contrib/pydantic/models_2.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/tests/contrib/pydantic/models.py b/tests/contrib/pydantic/models.py index 808447726..e2fdcfe2c 100644 --- a/tests/contrib/pydantic/models.py +++ b/tests/contrib/pydantic/models.py @@ -84,7 +84,6 @@ def _check_instance(self) -> None: def make_special_types_object() -> SpecialTypesModel: return SpecialTypesModel( datetime_field=datetime(2000, 1, 2, 3, 4, 5), - # 946800245 datetime_field_int=946782245, # type: ignore datetime_field_float=946782245.0, # type: ignore datetime_field_str_formatted="2000-01-02T03:04:05Z", # type: ignore diff --git a/tests/contrib/pydantic/models_2.py b/tests/contrib/pydantic/models_2.py index f04447fd0..cc81ebf2a 100644 --- a/tests/contrib/pydantic/models_2.py +++ b/tests/contrib/pydantic/models_2.py @@ -92,7 +92,6 @@ class StandardTypesModel(BaseModel): pattern_field: Pattern hashable_field: Hashable any_field: Any - # callable_field: Callable def _check_instance(self) -> None: # Boolean checks @@ -158,7 +157,6 @@ def _check_instance(self) -> None: assert isinstance(self.hashable_field, Hashable) assert self.hashable_field == "test" assert self.any_field == "anything goes" - # assert callable(self.callable_field) def make_standard_types_object() -> StandardTypesModel: @@ -199,7 +197,6 @@ def make_standard_types_object() -> StandardTypesModel: pattern_field=re.compile(r"\d+"), hashable_field="test", any_field="anything goes", - # callable_field=lambda x: x, ) From a8fb520fe2a0e023f5cf343ded569032678f49b3 Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 15:46:05 -0500 Subject: [PATCH 95/96] Test defaultdict support --- tests/contrib/pydantic/models_2.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/contrib/pydantic/models_2.py b/tests/contrib/pydantic/models_2.py index cc81ebf2a..5e9f51b23 100644 --- a/tests/contrib/pydantic/models_2.py 
+++ b/tests/contrib/pydantic/models_2.py @@ -84,7 +84,7 @@ class StandardTypesModel(BaseModel): # Mappings dict_field: dict - # defaultdict_field: collections.defaultdict + defaultdict_field: collections.defaultdict[str, int] counter_field: collections.Counter typed_dict_field: UserTypedDict @@ -144,8 +144,8 @@ def _check_instance(self) -> None: # Mapping checks assert isinstance(self.dict_field, dict) assert self.dict_field == {"a": 1, "b": 2} - # assert isinstance(self.defaultdict_field, collections.defaultdict) - # assert dict(self.defaultdict_field) == {"a": 1, "b": 2} + assert isinstance(self.defaultdict_field, collections.defaultdict) + assert dict(self.defaultdict_field) == {"a": 1, "b": 2} assert isinstance(self.counter_field, collections.Counter) assert dict(self.counter_field) == {"a": 1, "b": 2} assert isinstance(self.typed_dict_field, dict) @@ -190,7 +190,7 @@ def make_standard_types_object() -> StandardTypesModel: sequence_field=[1, 2, 3], # Mappings dict_field={"a": 1, "b": 2}, - # defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), + defaultdict_field=collections.defaultdict(int, {"a": 1, "b": 2}), counter_field=collections.Counter({"a": 1, "b": 2}), typed_dict_field={"name": "username", "id": 7}, # Other Types From 771de5ccb118cf2cc45062360f4914ddf15d31ba Mon Sep 17 00:00:00 2001 From: Dan Davison Date: Thu, 13 Feb 2025 18:36:33 -0500 Subject: [PATCH 96/96] Fix docstrings --- temporalio/contrib/pydantic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/temporalio/contrib/pydantic.py b/temporalio/contrib/pydantic.py index d2c09d5ea..2e78ac5d8 100644 --- a/temporalio/contrib/pydantic.py +++ b/temporalio/contrib/pydantic.py @@ -52,7 +52,7 @@ def encoding(self) -> str: def to_payload(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class. - Uses :py:func:`pydantic_core.to_json` to serialize ``value` to JSON. + Uses ``pydantic_core.to_json`` to serialize ``value`` to JSON. See https://docs.pydantic.dev/latest/api/pydantic_core/#pydantic_core.to_json. @@ -68,7 +68,7 @@ def from_payload( ) -> Any: """See base class. - Uses :py:func:`pydantic.TypeAdapter.validate_json` to construct an + Uses ``pydantic.TypeAdapter.validate_json`` to construct an instance of the type specified by ``type_hint`` from the JSON payload. See
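
For reference, a minimal sketch of the end-to-end usage this series converges on. It is not part of any patch above: the MyModel and EchoModelWorkflow names, the server address, and the task queue are illustrative, and the client is reconfigured with pydantic_data_converter using the same client.config() pattern the tests in this series use.

    import uuid

    from pydantic import BaseModel

    from temporalio import workflow
    from temporalio.client import Client
    from temporalio.contrib.pydantic import pydantic_data_converter
    from temporalio.worker import Worker


    class MyModel(BaseModel):
        # Illustrative model; any Pydantic v2 model round-trips the same way.
        name: str
        count: int


    @workflow.defn
    class EchoModelWorkflow:
        # Illustrative workflow that simply returns its Pydantic argument.
        @workflow.run
        async def run(self, model: MyModel) -> MyModel:
            return model


    async def main() -> None:
        # Swap the data converter on an existing client configuration, as the
        # tests added in this series do.
        client = await Client.connect("localhost:7233")
        config = client.config()
        config["data_converter"] = pydantic_data_converter
        client = Client(**config)

        task_queue = str(uuid.uuid4())
        async with Worker(
            client, task_queue=task_queue, workflows=[EchoModelWorkflow]
        ):
            result = await client.execute_workflow(
                EchoModelWorkflow.run,
                MyModel(name="example", count=7),
                id=str(uuid.uuid4()),
                task_queue=task_queue,
            )
            assert result == MyModel(name="example", count=7)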