diff --git a/poetry.lock b/poetry.lock index 0985d99..ba1fd73 100644 --- a/poetry.lock +++ b/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.26.1)"] [[package]] name = "certifi" -version = "2025.8.3" +version = "2025.10.5" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.7" files = [ - {file = "certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5"}, - {file = "certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407"}, + {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"}, + {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"}, ] [[package]] @@ -131,17 +131,6 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] -[[package]] -name = "httpx-sse" -version = "0.4.0" -description = "Consume Server-Sent Event (SSE) messages with HTTPX." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, - {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, -] - [[package]] name = "idna" version = "3.10" @@ -494,43 +483,53 @@ files = [ [[package]] name = "tomli" -version = "2.2.1" +version = "2.3.0" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + 
{file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, ] [[package]] @@ -558,4 +557,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "3c4bf0b75d27d2ce3738ca3d6b64dfb03909c56fb8e280a98890abd4619134d8" +content-hash = "8551b871abee465e23fb0966d51f2c155fd257b55bdcb0c02d095de19f92f358" diff --git a/pyproject.toml b/pyproject.toml index ef6b945..a33bf6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "credal" [tool.poetry] name = "credal" -version = "0.1.7" +version = "0.1.8" description = "" readme = "README.md" authors = [] @@ -36,7 +36,6 @@ Repository = 'https://github.com/credal-ai/credal-python-sdk' [tool.poetry.dependencies] python = "^3.8" httpx = ">=0.21.2" -httpx-sse = "0.4.0" pydantic = ">= 1.9.2" pydantic-core = ">=2.18.2" typing_extensions = ">= 4.0.0" diff --git a/requirements.txt b/requirements.txt index f129cb3..e80f640 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ httpx>=0.21.2 -httpx-sse==0.4.0 pydantic>= 1.9.2 pydantic-core>=2.18.2 typing_extensions>= 4.0.0 diff --git a/src/credal/__init__.py b/src/credal/__init__.py index c939153..8b11245 100644 --- a/src/credal/__init__.py +++ b/src/credal/__init__.py @@ -49,7 +49,6 @@ MessageFeedback, MessageReply, NumberFieldSchema, - PolicyTrigger, ReferencedSource, ResponseChunk, SendAgentMessageResponse, @@ -137,7 +136,6 @@ 
"MongoSourceFieldsConfig": ".document_collections", "NumberFieldSchema": ".copilots", "Operator": ".common", - "PolicyTrigger": ".copilots", "ReferencedSource": ".copilots", "ResourceIdentifier": ".common", "ResourceIdentifier_ExternalResourceId": ".common", @@ -243,7 +241,6 @@ def __dir__(): "MongoSourceFieldsConfig", "NumberFieldSchema", "Operator", - "PolicyTrigger", "ReferencedSource", "ResourceIdentifier", "ResourceIdentifier_ExternalResourceId", diff --git a/src/credal/copilots/__init__.py b/src/credal/copilots/__init__.py index 4dabc9f..df887e7 100644 --- a/src/credal/copilots/__init__.py +++ b/src/credal/copilots/__init__.py @@ -35,7 +35,6 @@ MessageFeedback, MessageReply, NumberFieldSchema, - PolicyTrigger, ReferencedSource, ResponseChunk, SendAgentMessageResponse, @@ -81,7 +80,6 @@ "MessageFeedback": ".types", "MessageReply": ".types", "NumberFieldSchema": ".types", - "PolicyTrigger": ".types", "ReferencedSource": ".types", "ResponseChunk": ".types", "SendAgentMessageResponse": ".types", @@ -150,7 +148,6 @@ def __dir__(): "MessageFeedback", "MessageReply", "NumberFieldSchema", - "PolicyTrigger", "ReferencedSource", "ResponseChunk", "SendAgentMessageResponse", diff --git a/src/credal/copilots/raw_client.py b/src/credal/copilots/raw_client.py index 566f4fe..8595bac 100644 --- a/src/credal/copilots/raw_client.py +++ b/src/credal/copilots/raw_client.py @@ -1,16 +1,16 @@ # This file was auto-generated by Fern from our API Definition. import contextlib -import json import typing import uuid from json.decoder import JSONDecodeError +from logging import error, warning -import httpx_sse from ..common.types.collaborator import Collaborator from ..core.api_error import ApiError from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ..core.http_response import AsyncHttpResponse, HttpResponse +from ..core.http_sse._api import EventSource from ..core.pydantic_utilities import parse_obj_as from ..core.request_options import RequestOptions from ..core.serialization import convert_and_respect_annotation_metadata @@ -312,7 +312,7 @@ def _stream() -> HttpResponse[typing.Iterator[StreamingChunk]]: if 200 <= _response.status_code < 300: def _iter(): - _event_source = httpx_sse.EventSource(_response) + _event_source = EventSource(_response) for _sse in _event_source.iter_sse(): if _sse.data == None: return @@ -321,11 +321,19 @@ def _iter(): StreamingChunk, parse_obj_as( type_=StreamingChunk, # type: ignore - object_=json.loads(_sse.data), + object_=_sse.json(), ), ) - except Exception: - pass + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) return return HttpResponse(response=_response, data=_iter()) @@ -795,7 +803,7 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[StreamingChunk]]: if 200 <= _response.status_code < 300: async def _iter(): - _event_source = httpx_sse.EventSource(_response) + _event_source = EventSource(_response) async for _sse in _event_source.aiter_sse(): if _sse.data == None: return @@ -804,11 +812,19 @@ async def _iter(): StreamingChunk, parse_obj_as( type_=StreamingChunk, # type: ignore - object_=json.loads(_sse.data), + object_=_sse.json(), ), ) - except Exception: - pass + except JSONDecodeError 
as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) return return AsyncHttpResponse(response=_response, data=_iter()) diff --git a/src/credal/copilots/types/__init__.py b/src/credal/copilots/types/__init__.py index 83eda0f..6a6cf4c 100644 --- a/src/credal/copilots/types/__init__.py +++ b/src/credal/copilots/types/__init__.py @@ -30,7 +30,6 @@ from .message_feedback import MessageFeedback from .message_reply import MessageReply from .number_field_schema import NumberFieldSchema - from .policy_trigger import PolicyTrigger from .referenced_source import ReferencedSource from .response_chunk import ResponseChunk from .send_agent_message_response import SendAgentMessageResponse @@ -79,7 +78,6 @@ "MessageFeedback": ".message_feedback", "MessageReply": ".message_reply", "NumberFieldSchema": ".number_field_schema", - "PolicyTrigger": ".policy_trigger", "ReferencedSource": ".referenced_source", "ResponseChunk": ".response_chunk", "SendAgentMessageResponse": ".send_agent_message_response", @@ -148,7 +146,6 @@ def __dir__(): "MessageFeedback", "MessageReply", "NumberFieldSchema", - "PolicyTrigger", "ReferencedSource", "ResponseChunk", "SendAgentMessageResponse", diff --git a/src/credal/copilots/types/blocked_chunk.py b/src/credal/copilots/types/blocked_chunk.py index e827055..3afb931 100644 --- a/src/credal/copilots/types/blocked_chunk.py +++ b/src/credal/copilots/types/blocked_chunk.py @@ -7,14 +7,12 @@ import typing_extensions from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata -from .policy_trigger import PolicyTrigger class BlockedChunk(UniversalBaseModel): conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] warnings: typing.List[str] blocks: typing.List[str] - policy_triggers: typing_extensions.Annotated[typing.List[PolicyTrigger], FieldMetadata(alias="policyTriggers")] if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/src/credal/copilots/types/message_blocked.py b/src/credal/copilots/types/message_blocked.py index a53c92d..f2da9b8 100644 --- a/src/credal/copilots/types/message_blocked.py +++ b/src/credal/copilots/types/message_blocked.py @@ -8,11 +8,9 @@ from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .inserted_audit_log import InsertedAuditLog -from .policy_trigger import PolicyTrigger class MessageBlocked(UniversalBaseModel): - policy_triggers: typing.List[PolicyTrigger] conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] blocks: typing.List[str] warnings: typing.List[str] diff --git a/src/credal/copilots/types/message_reply.py b/src/credal/copilots/types/message_reply.py index 93c4332..bba16f9 100644 --- a/src/credal/copilots/types/message_reply.py +++ b/src/credal/copilots/types/message_reply.py @@ -8,14 +8,12 @@ from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .inserted_audit_log import InsertedAuditLog -from .policy_trigger import PolicyTrigger from 
.referenced_source import ReferencedSource from .response_chunk import ResponseChunk from .web_search_result import WebSearchResult class MessageReply(UniversalBaseModel): - policy_triggers: typing.List[PolicyTrigger] conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] response: ResponseChunk warnings: typing.List[str] diff --git a/src/credal/copilots/types/policy_trigger.py b/src/credal/copilots/types/policy_trigger.py deleted file mode 100644 index 6a43a64..0000000 --- a/src/credal/copilots/types/policy_trigger.py +++ /dev/null @@ -1,21 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing - -import pydantic -from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel - - -class PolicyTrigger(UniversalBaseModel): - id: str - name: str - description: str - - if IS_PYDANTIC_V2: - model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 - else: - - class Config: - frozen = True - smart_union = True - extra = pydantic.Extra.allow diff --git a/src/credal/copilots/types/send_message_response.py b/src/credal/copilots/types/send_message_response.py index f8d3fb7..609f4d3 100644 --- a/src/credal/copilots/types/send_message_response.py +++ b/src/credal/copilots/types/send_message_response.py @@ -10,7 +10,6 @@ from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ...core.serialization import FieldMetadata from .inserted_audit_log import InsertedAuditLog -from .policy_trigger import PolicyTrigger from .referenced_source import ReferencedSource from .response_chunk import ResponseChunk from .web_search_result import WebSearchResult @@ -18,7 +17,6 @@ class SendMessageResponse_AiResponseResult(UniversalBaseModel): type: typing.Literal["ai_response_result"] = "ai_response_result" - policy_triggers: typing.List[PolicyTrigger] conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] response: ResponseChunk warnings: typing.List[str] @@ -46,7 +44,6 @@ class Config: class SendMessageResponse_BlockedResult(UniversalBaseModel): type: typing.Literal["blocked_result"] = "blocked_result" - policy_triggers: typing.List[PolicyTrigger] conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] blocks: typing.List[str] warnings: typing.List[str] diff --git a/src/credal/copilots/types/streaming_chunk.py b/src/credal/copilots/types/streaming_chunk.py index 7c1b7ea..5c3eddc 100644 --- a/src/credal/copilots/types/streaming_chunk.py +++ b/src/credal/copilots/types/streaming_chunk.py @@ -11,7 +11,6 @@ from ...core.serialization import FieldMetadata from .data_filter import DataFilter from .error_chunk_data import ErrorChunkData -from .policy_trigger import PolicyTrigger from .referenced_source import ReferencedSource from .web_search_result import WebSearchResult @@ -81,7 +80,6 @@ class StreamingChunk_Blocked(UniversalBaseModel): conversation_id: typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="conversationId")] warnings: typing.List[str] blocks: typing.List[str] - policy_triggers: typing_extensions.Annotated[typing.List[PolicyTrigger], FieldMetadata(alias="policyTriggers")] if IS_PYDANTIC_V2: model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 diff --git a/src/credal/core/client_wrapper.py b/src/credal/core/client_wrapper.py index 3eb4145..01f9f7c 100644 --- 
--- a/src/credal/core/client_wrapper.py
+++ b/src/credal/core/client_wrapper.py
@@ -22,10 +22,10 @@ def __init__(

     def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
-            "User-Agent": "credal/0.1.7",
+            "User-Agent": "credal/0.1.8",
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "credal",
-            "X-Fern-SDK-Version": "0.1.7",
+            "X-Fern-SDK-Version": "0.1.8",
             **(self.get_custom_headers() or {}),
         }
         headers["Authorization"] = f"Bearer {self._get_api_key()}"
diff --git a/src/credal/core/http_sse/__init__.py b/src/credal/core/http_sse/__init__.py
new file mode 100644
index 0000000..730e5a3
--- /dev/null
+++ b/src/credal/core/http_sse/__init__.py
@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# isort: skip_file
+
+import typing
+from importlib import import_module
+
+if typing.TYPE_CHECKING:
+    from ._api import EventSource, aconnect_sse, connect_sse
+    from ._exceptions import SSEError
+    from ._models import ServerSentEvent
+_dynamic_imports: typing.Dict[str, str] = {
+    "EventSource": "._api",
+    "SSEError": "._exceptions",
+    "ServerSentEvent": "._models",
+    "aconnect_sse": "._api",
+    "connect_sse": "._api",
+}
+
+
+def __getattr__(attr_name: str) -> typing.Any:
+    module_name = _dynamic_imports.get(attr_name)
+    if module_name is None:
+        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
+    try:
+        module = import_module(module_name, __package__)
+        if module_name == f".{attr_name}":
+            return module
+        else:
+            return getattr(module, attr_name)
+    except ImportError as e:
+        raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
+    except AttributeError as e:
+        raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
+
+
+def __dir__():
+    lazy_attrs = list(_dynamic_imports.keys())
+    return sorted(lazy_attrs)
+
+
+__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"]
diff --git a/src/credal/core/http_sse/_api.py b/src/credal/core/http_sse/_api.py
new file mode 100644
index 0000000..f900b3b
--- /dev/null
+++ b/src/credal/core/http_sse/_api.py
@@ -0,0 +1,112 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import re
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast
+
+import httpx
+from ._decoders import SSEDecoder
+from ._exceptions import SSEError
+from ._models import ServerSentEvent
+
+
+class EventSource:
+    def __init__(self, response: httpx.Response) -> None:
+        self._response = response
+
+    def _check_content_type(self) -> None:
+        content_type = self._response.headers.get("content-type", "").partition(";")[0]
+        if "text/event-stream" not in content_type:
+            raise SSEError(
+                f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}"
+            )
+
+    def _get_charset(self) -> str:
+        """Extract charset from Content-Type header, fallback to UTF-8."""
+        content_type = self._response.headers.get("content-type", "")
+
+        # Parse charset parameter using regex
+        charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE)
+        if charset_match:
+            charset = charset_match.group(1).strip("\"'")
+            # Validate that it's a known encoding
+            try:
+                # Test if the charset is valid by trying to encode/decode
+                "test".encode(charset).decode(charset)
+                return charset
+            except (LookupError, UnicodeError):
+                # If charset is invalid, fall back to UTF-8
+                pass
+
+        # Default to UTF-8 if no charset specified or invalid charset
+        return "utf-8"
+
+    @property
+    def response(self) -> httpx.Response:
+        return self._response
+
+    def iter_sse(self) -> Iterator[ServerSentEvent]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        charset = self._get_charset()
+
+        buffer = ""
+        for chunk in self._response.iter_bytes():
+            # Decode chunk using detected charset
+            text_chunk = chunk.decode(charset, errors="replace")
+            buffer += text_chunk
+
+            # Process complete lines
+            while "\n" in buffer:
+                line, buffer = buffer.split("\n", 1)
+                line = line.rstrip("\r")
+                sse = decoder.decode(line)
+                # when we reach a "\n\n" => line = ''
+                # => decoder will attempt to return an SSE Event
+                if sse is not None:
+                    yield sse
+
+        # Process any remaining data in buffer
+        if buffer.strip():
+            line = buffer.rstrip("\r")
+            sse = decoder.decode(line)
+            if sse is not None:
+                yield sse
+
+    async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        lines = cast(AsyncGenerator[str, None], self._response.aiter_lines())
+        try:
+            async for line in lines:
+                line = line.rstrip("\n")
+                sse = decoder.decode(line)
+                if sse is not None:
+                    yield sse
+        finally:
+            await lines.aclose()
+
+
+@contextmanager
+def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
+
+
+@asynccontextmanager
+async def aconnect_sse(
+    client: httpx.AsyncClient,
+    method: str,
+    url: str,
+    **kwargs: Any,
+) -> AsyncIterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    async with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
diff --git a/src/credal/core/http_sse/_decoders.py b/src/credal/core/http_sse/_decoders.py
new file mode 100644
index 0000000..339b089
--- /dev/null
+++ b/src/credal/core/http_sse/_decoders.py
@@ -0,0 +1,61 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from typing import List, Optional
+
+from ._models import ServerSentEvent
+
+
+class SSEDecoder:
+    def __init__(self) -> None:
+        self._event = ""
+        self._data: List[str] = []
+        self._last_event_id = ""
+        self._retry: Optional[int] = None
+
+    def decode(self, line: str) -> Optional[ServerSentEvent]:
+        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation  # noqa: E501
+
+        if not line:
+            if not self._event and not self._data and not self._last_event_id and self._retry is None:
+                return None
+
+            sse = ServerSentEvent(
+                event=self._event,
+                data="\n".join(self._data),
+                id=self._last_event_id,
+                retry=self._retry,
+            )
+
+            # NOTE: as per the SSE spec, do not reset last_event_id.
+            self._event = ""
+            self._data = []
+            self._retry = None
+
+            return sse
+
+        if line.startswith(":"):
+            return None
+
+        fieldname, _, value = line.partition(":")
+
+        if value.startswith(" "):
+            value = value[1:]
+
+        if fieldname == "event":
+            self._event = value
+        elif fieldname == "data":
+            self._data.append(value)
+        elif fieldname == "id":
+            if "\0" in value:
+                pass
+            else:
+                self._last_event_id = value
+        elif fieldname == "retry":
+            try:
+                self._retry = int(value)
+            except (TypeError, ValueError):
+                pass
+        else:
+            pass  # Field is ignored.
+
+        return None
diff --git a/src/credal/core/http_sse/_exceptions.py b/src/credal/core/http_sse/_exceptions.py
new file mode 100644
index 0000000..81605a8
--- /dev/null
+++ b/src/credal/core/http_sse/_exceptions.py
@@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import httpx
+
+
+class SSEError(httpx.TransportError):
+    pass
diff --git a/src/credal/core/http_sse/_models.py b/src/credal/core/http_sse/_models.py
new file mode 100644
index 0000000..1af57f8
--- /dev/null
+++ b/src/credal/core/http_sse/_models.py
@@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import json
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass(frozen=True)
+class ServerSentEvent:
+    event: str = "message"
+    data: str = ""
+    id: str = ""
+    retry: Optional[int] = None
+
+    def json(self) -> Any:
+        """Parse the data field as JSON."""
+        return json.loads(self.data)
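
The new src/credal/core/http_sse package replaces the external httpx-sse dependency with vendored equivalents. A minimal sketch of the public helpers it exposes (connect_sse and EventSource.iter_sse) follows; the endpoint URL and request payload are placeholders chosen for illustration, not a real Credal API route.

# Sketch only: drives the vendored SSE helpers directly against a hypothetical endpoint.
import httpx

from credal.core.http_sse import SSEError, connect_sse


def consume_stream() -> None:
    with httpx.Client() as client:
        try:
            # connect_sse() sets the Accept/Cache-Control headers and wraps the
            # streaming response in an EventSource.
            with connect_sse(client, "POST", "https://example.com/stream", json={"message": "hello"}) as source:
                for sse in source.iter_sse():
                    # Each ServerSentEvent exposes event/data/id/retry plus a json() helper.
                    print(sse.event, sse.json())
        except SSEError as exc:
            # Raised by EventSource when the response is not text/event-stream.
            print(f"Endpoint did not return an SSE response: {exc}")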
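
For reference, a standalone sketch of how the vendored SSEDecoder in _decoders.py assembles a ServerSentEvent: field lines are buffered until a blank line terminates the frame, and ServerSentEvent.json() parses the accumulated data field. The sample frame below is invented for illustration.

# Sketch only: feeds a hand-written SSE frame through the new decoder.
from credal.core.http_sse._decoders import SSEDecoder

decoder = SSEDecoder()
frame = [
    "event: message",
    'data: {"type": "text_chunk", "text": "hello"}',
    "id: 42",
    "",  # the blank line terminates the event, so decode() returns it
]
for line in frame:
    sse = decoder.decode(line)
    if sse is not None:
        print(sse.event, sse.id, sse.json())  # -> message 42 {'type': 'text_chunk', 'text': 'hello'}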
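
Note that raw_client.py now reports skipped SSE events through the standard library's root logger (via "from logging import error, warning") instead of silently swallowing them. A consumer that wants to route or format those diagnostics can configure the root logger before streaming; this is plain stdlib behaviour, not an SDK-specific hook.

# Sketch only: captures the "Skipping SSE event ..." warnings emitted by raw_client.py.
import logging

logging.basicConfig(
    level=logging.WARNING,  # warning()/error() calls in raw_client.py land on the root logger
    format="%(asctime)s %(levelname)s %(message)s",
)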