95 changes: 47 additions & 48 deletions poetry.lock

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions pyproject.toml
@@ -3,7 +3,7 @@ name = "credal"

[tool.poetry]
name = "credal"
version = "0.1.7"
version = "0.1.8"
description = ""
readme = "README.md"
authors = []
@@ -36,7 +36,6 @@ Repository = 'https://github.com/credal-ai/credal-python-sdk'
[tool.poetry.dependencies]
python = "^3.8"
httpx = ">=0.21.2"
httpx-sse = "0.4.0"
pydantic = ">= 1.9.2"
pydantic-core = ">=2.18.2"
typing_extensions = ">= 4.0.0"
1 change: 0 additions & 1 deletion requirements.txt
@@ -1,5 +1,4 @@
httpx>=0.21.2
httpx-sse==0.4.0
pydantic>= 1.9.2
pydantic-core>=2.18.2
typing_extensions>= 4.0.0
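
Dropping the pinned `httpx-sse==0.4.0` runtime dependency from both `pyproject.toml` and `requirements.txt` works because, as the `raw_client.py` hunks below show, SSE parsing now goes through the SDK's own `credal.core.http_sse` module. A minimal sanity check, assuming a build of this branch is installed into a fresh environment:

```python
# Sketch only: the SDK should import and stream without httpx-sse installed.
import importlib.util

import credal  # expected to succeed even when httpx_sse is absent

print("httpx_sse present:", importlib.util.find_spec("httpx_sse") is not None)
```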
3 changes: 0 additions & 3 deletions src/credal/__init__.py
@@ -49,7 +49,6 @@
MessageFeedback,
MessageReply,
NumberFieldSchema,
PolicyTrigger,
ReferencedSource,
ResponseChunk,
SendAgentMessageResponse,
@@ -137,7 +136,6 @@
"MongoSourceFieldsConfig": ".document_collections",
"NumberFieldSchema": ".copilots",
"Operator": ".common",
"PolicyTrigger": ".copilots",
"ReferencedSource": ".copilots",
"ResourceIdentifier": ".common",
"ResourceIdentifier_ExternalResourceId": ".common",
@@ -243,7 +241,6 @@ def __dir__():
"MongoSourceFieldsConfig",
"NumberFieldSchema",
"Operator",
"PolicyTrigger",
"ReferencedSource",
"ResourceIdentifier",
"ResourceIdentifier_ExternalResourceId",
3 changes: 0 additions & 3 deletions src/credal/copilots/__init__.py
@@ -35,7 +35,6 @@
MessageFeedback,
MessageReply,
NumberFieldSchema,
PolicyTrigger,
ReferencedSource,
ResponseChunk,
SendAgentMessageResponse,
@@ -81,7 +80,6 @@
"MessageFeedback": ".types",
"MessageReply": ".types",
"NumberFieldSchema": ".types",
"PolicyTrigger": ".types",
"ReferencedSource": ".types",
"ResponseChunk": ".types",
"SendAgentMessageResponse": ".types",
@@ -150,7 +148,6 @@ def __dir__():
"MessageFeedback",
"MessageReply",
"NumberFieldSchema",
"PolicyTrigger",
"ReferencedSource",
"ResponseChunk",
"SendAgentMessageResponse",
36 changes: 26 additions & 10 deletions src/credal/copilots/raw_client.py
@@ -1,16 +1,16 @@
# This file was auto-generated by Fern from our API Definition.

import contextlib
import json
import typing
import uuid
from json.decoder import JSONDecodeError
from logging import error, warning

import httpx_sse
from ..common.types.collaborator import Collaborator
from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.http_response import AsyncHttpResponse, HttpResponse
from ..core.http_sse._api import EventSource
from ..core.pydantic_utilities import parse_obj_as
from ..core.request_options import RequestOptions
from ..core.serialization import convert_and_respect_annotation_metadata
@@ -312,7 +312,7 @@ def _stream() -> HttpResponse[typing.Iterator[StreamingChunk]]:
if 200 <= _response.status_code < 300:

def _iter():
_event_source = httpx_sse.EventSource(_response)
_event_source = EventSource(_response)
for _sse in _event_source.iter_sse():
if _sse.data == None:
return
@@ -321,11 +321,19 @@ def _iter():
StreamingChunk,
parse_obj_as(
type_=StreamingChunk, # type: ignore
object_=json.loads(_sse.data),
object_=_sse.json(),
),
)
except Exception:
pass
except JSONDecodeError as e:
warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}")
except (TypeError, ValueError, KeyError, AttributeError) as e:
warning(
f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}"
)
except Exception as e:
error(
f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}"
)
return

return HttpResponse(response=_response, data=_iter())
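
For context, a minimal standalone sketch of the skip-and-log pattern the new `_iter()` adopts in place of the old `except Exception: pass`: invalid JSON and model-construction failures are logged as warnings and the event is skipped, while anything unexpected is logged at error level without killing the stream. `Chunk` here is a hypothetical stand-in for the SDK's `StreamingChunk` model.

```python
# Standalone sketch (not the generated client): parse a stream of SSE data
# payloads into pydantic models, skipping bad events rather than silently
# swallowing every exception.
import json
from json.decoder import JSONDecodeError
from logging import error, warning
from typing import Iterator, Optional

import pydantic


class Chunk(pydantic.BaseModel):  # hypothetical stand-in for StreamingChunk
    text: str


def iter_chunks(payloads: Iterator[Optional[str]]) -> Iterator[Chunk]:
    for data in payloads:
        if data is None:
            return
        try:
            yield Chunk(**json.loads(data))
        except JSONDecodeError as e:
            # Malformed JSON: skip this event, keep the stream alive.
            warning(f"Skipping SSE event with invalid JSON: {e}")
        except (TypeError, ValueError, KeyError, AttributeError) as e:
            # JSON parsed but the model could not be constructed from it.
            warning(f"Skipping SSE event due to model construction error: {e}")
        except Exception as e:
            # Anything else is unexpected; surface it loudly but keep going.
            error(f"Unexpected error processing SSE event: {type(e).__name__}: {e}")
```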
@@ -795,7 +803,7 @@ async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[StreamingChunk]]:
if 200 <= _response.status_code < 300:

async def _iter():
_event_source = httpx_sse.EventSource(_response)
_event_source = EventSource(_response)
async for _sse in _event_source.aiter_sse():
if _sse.data == None:
return
@@ -804,11 +812,19 @@ async def _iter():
StreamingChunk,
parse_obj_as(
type_=StreamingChunk, # type: ignore
object_=json.loads(_sse.data),
object_=_sse.json(),
),
)
except Exception:
pass
except JSONDecodeError as e:
warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}")
except (TypeError, ValueError, KeyError, AttributeError) as e:
warning(
f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}"
)
except Exception as e:
error(
f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}"
)
return

return AsyncHttpResponse(response=_response, data=_iter())
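
The async hunk mirrors the sync sketch above, consuming events via `EventSource.aiter_sse()`: in both code paths a malformed or unparseable event is now skipped with a logged warning instead of being dropped silently, and only genuinely unexpected failures are logged at error level.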