Commit 2340667

fix: update writer-sdk wrapper to be compatible with newest version (#1269)
* fix: update to be compatible with writer-sdk 2.3.2

Co-authored-by: Neelasha Bhattacharjee <[email protected]>
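The change is mechanical but breaking on the wrapper side: everywhere the wrapper used the SDK's NotGiven sentinel as the default for unset request parameters, it now uses Omit, the sentinel writer-sdk 2.3.2 expects for fields that should be left out of the request entirely. A minimal sketch of the pattern, assuming a configured client (the client construction and model name below are illustrative assumptions, not taken from this commit):

from writerai import Writer
from writerai._types import Omit

client = Writer()  # assumes WRITER_API_KEY is set in the environment

def chat_once(messages, config: dict):
    # Keys absent from `config` fall back to Omit(), so the SDK drops the
    # field from the request body instead of sending an explicit null.
    return client.chat.chat(
        messages=messages,
        model=config.get("model", "palmyra-x-004"),  # placeholder model name
        temperature=config.get("temperature", Omit()),
        top_p=config.get("top_p", Omit()),
    )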
1 parent fb608ac commit 2340667

File tree

3 files changed: +68 additions, -68 deletions


poetry.lock

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -51,7 +51,7 @@ requests = ">= 2.31.0, < 2.33"
 uvicorn = ">= 0.20.0, < 1"
 watchdog = ">= 3.0.0, < 4"
 websockets = ">= 12, < 16"
-writer-sdk = ">= 2.3.1, < 3"
+writer-sdk = ">= 2.3.2, < 3"
 python-multipart = ">=0.0.7, < 1"
 orjson = "^3.11.0, <4"
 launchdarkly-server-sdk = "^9.12.0"
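To confirm an environment actually picked up the new floor, a hedged stdlib-only check (assumes a plain X.Y.Z version string; use packaging.version for anything fancier):

from importlib.metadata import version

# "writer-sdk" is the distribution name pinned in pyproject.toml above.
installed = tuple(int(p) for p in version("writer-sdk").split(".")[:3])
assert installed >= (2, 3, 2), f"writer-sdk {installed} predates 2.3.2"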

src/writer/ai/__init__.py

Lines changed: 63 additions & 63 deletions
@@ -25,7 +25,7 @@
 from writerai._exceptions import BadRequestError, WriterError
 from writerai._response import BinaryAPIResponse
 from writerai._streaming import Stream
-from writerai._types import Body, Headers, NotGiven, Query
+from writerai._types import Body, Headers, NotGiven, Omit, Query
 from writerai.resources import FilesResource, GraphsResource
 from writerai.types import (
     ApplicationListResponse,
@@ -95,40 +95,40 @@ class ChatOptions(APIOptions, total=False):
         Iterable[
             Union[SDKGraphTool, SDKFunctionTool, SDKLlmTool, SDKWebSearchTool]
         ],
-        NotGiven
+        Omit
     ]
-    response_format: Union[ResponseFormat, NotGiven]
-    logprobs: Union[bool, NotGiven]
-    max_tokens: Union[int, NotGiven]
-    n: Union[int, NotGiven]
-    stop: Union[List[str], str, NotGiven]
-    temperature: Union[float, NotGiven]
-    top_p: Union[float, NotGiven]
+    response_format: Union[ResponseFormat, Omit]
+    logprobs: Union[bool, Omit]
+    max_tokens: Union[int, Omit]
+    n: Union[int, Omit]
+    stop: Union[List[str], str, Omit]
+    temperature: Union[float, Omit]
+    top_p: Union[float, Omit]


 class CreateOptions(APIOptions, total=False):
     model: str
-    best_of: Union[int, NotGiven]
-    max_tokens: Union[int, NotGiven]
-    random_seed: Union[int, NotGiven]
-    stop: Union[List[str], str, NotGiven]
-    temperature: Union[float, NotGiven]
-    top_p: Union[float, NotGiven]
+    best_of: Union[int, Omit]
+    max_tokens: Union[int, Omit]
+    random_seed: Union[int, Omit]
+    stop: Union[List[str], str, Omit]
+    temperature: Union[float, Omit]
+    top_p: Union[float, Omit]


 class APIListOptions(APIOptions, total=False):
-    after: Union[str, NotGiven]
-    before: Union[str, NotGiven]
-    limit: Union[int, NotGiven]
-    order: Union[Literal["asc", "desc"], NotGiven]
+    after: Union[str, Omit]
+    before: Union[str, Omit]
+    limit: Union[int, Omit]
+    order: Union[Literal["asc", "desc"], Omit]


 class APIRetrieveJobsOptions(APIOptions, total=False):
-    limit: Union[int, NotGiven]
-    offset: Union[int, NotGiven]
+    limit: Union[int, Omit]
+    offset: Union[int, Omit]
     status: Union[
         Literal["completed", "failed", "in_progress"],
-        NotGiven
+        Omit
     ]

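These options classes are total=False TypedDicts: callers set only the fields they need, and the wrapper substitutes Omit() for everything else when it builds the SDK call. An illustrative construction (not from the library docs):

from writer.ai import ChatOptions  # assumes the class is importable from the module shown above

options: ChatOptions = {
    "max_tokens": 512,
    "temperature": 0.2,
    # stop, top_p, logprobs, etc. are simply not set; the wrapper passes
    # Omit() for them so they never reach the request body.
}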
@@ -479,8 +479,8 @@ def update(
         graphs = self._retrieve_graphs_accessor()
         response = graphs.update(
             self.id,
-            name=payload.get("name", NotGiven()),
-            description=payload.get("description", NotGiven()),
+            name=payload.get("name", Omit()),
+            description=payload.get("description", Omit()),
             **config
         )
         Graph.stale_ids.add(self.id)
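The payload.get(..., Omit()) defaults give update() partial-update semantics: keys missing from the payload resolve to Omit() and are excluded from the request, so the server keeps their current values. A hypothetical call (method shape inferred from the diff):

# "description" is absent from the payload, so graphs.update receives
# description=Omit() and the existing description is left unchanged.
graph.update({"name": "renamed-graph"})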
@@ -744,7 +744,7 @@ def create_graph(
     graphs = Graph._retrieve_graphs_accessor()
     graph_object = graphs.create(
         name=name,
-        description=description or NotGiven(),
+        description=description or Omit(),
         **config
     )
     converted_object = cast(SDKGraph, graph_object)
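Note that the description or Omit() idiom treats every falsy description the same way: an explicit empty string is dropped from the request exactly like None. Illustrative calls (signature inferred from the diff):

create_graph(name="support-docs")                  # description omitted
create_graph(name="support-docs", description="")  # "" is falsy, so also omitted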
@@ -801,13 +801,13 @@ def list_graphs(config: Optional[APIListOptions] = None) -> List[Graph]:
         Additional body parameters for the request.
     - `timeout` (Union[float, httpx.Timeout, None, NotGiven]):
         Timeout for the request in seconds.
-    - `after` (Union[str, NotGiven]):
+    - `after` (Union[str, Omit]):
         Filter to retrieve items created after a specific cursor.
-    - `before` (Union[str, NotGiven]):
+    - `before` (Union[str, Omit]):
         Filter to retrieve items created before a specific cursor.
-    - `limit` (Union[int, NotGiven]):
+    - `limit` (Union[int, Omit]):
         The number of items to retrieve.
-    - `order` (Union[Literal["asc", "desc"], NotGiven]):
+    - `order` (Union[Literal["asc", "desc"], Omit]):
         The order in which to retrieve items.
     """
     config = config or {}
@@ -942,13 +942,13 @@ def list_files(config: Optional[APIListOptions] = None) -> List[File]:
         Additional body parameters for the request.
     - `timeout` (Union[float, httpx.Timeout, None, NotGiven]):
         Timeout for the request in seconds.
-    - `after` (Union[str, NotGiven]):
+    - `after` (Union[str, Omit]):
         Filter to retrieve items created after a specific cursor.
-    - `before` (Union[str, NotGiven]):
+    - `before` (Union[str, Omit]):
         Filter to retrieve items created before a specific cursor.
-    - `limit` (Union[int, NotGiven]):
+    - `limit` (Union[int, Omit]):
         The number of items to retrieve.
-    - `order` (Union[Literal["asc", "desc"], NotGiven]):
+    - `order` (Union[Literal["asc", "desc"], Omit]):
         The order in which to retrieve items.
     """
     config = config or {}
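list_graphs and list_files share the APIListOptions cursor fields documented above. A hedged pagination sketch (import path assumed; the cursor string is a placeholder):

from writer.ai import list_files, list_graphs

recent_files = list_files({"limit": 20, "order": "desc"})
next_graphs = list_graphs({"after": "cursor_abc123", "limit": 10})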
@@ -1127,23 +1127,23 @@ class Conversation:
         Configure how the model will call functions: `auto` will allow the model
         to automatically choose the best tool, `none` disables tool calling.
         You can also pass a specific previously defined function.
-    - `logprobs` (Union[bool, NotGiven]):
+    - `logprobs` (Union[bool, Omit]):
         Specifies whether to return log probabilities of the output tokens.
     - `tools` (Union[Iterable[Union[SDKGraphTool,
-      SDKFunctionTool, SDKLlmTool]], NotGiven]):
+      SDKFunctionTool, SDKLlmTool]], Omit]):
         Tools available for the model to use.
-    - `max_tokens` (Union[int, NotGiven]):
+    - `max_tokens` (Union[int, Omit]):
         Maximum number of tokens to generate.
-    - `n` (Union[int, NotGiven]):
+    - `n` (Union[int, Omit]):
         Number of completions to generate.
-    - `stop` (Union[List[str], str, NotGiven]):
+    - `stop` (Union[List[str], str, Omit]):
         Sequences where the API will stop generating tokens.
-    - `temperature` (Union[float, NotGiven]):
+    - `temperature` (Union[float, Omit]):
         Controls the randomness or creativity of the model's responses.
         A higher temperature results in more varied and less predictable text,
         while a lower temperature produces more deterministic
         and conservative outputs.
-    - `top_p` (Union[float, NotGiven]):
+    - `top_p` (Union[float, Omit]):
         Sets the threshold for "nucleus sampling," a technique to focus the model's
         token generation on the most likely subset of tokens. Only tokens with
         cumulative probability above this threshold are considered, controlling the
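As the docstring explains, temperature and top_p are alternative sampling controls; a common convention, not a library requirement, is to set one and let the other fall back to Omit():

# ChatOptions as defined earlier in this module; values are illustrative.
creative: ChatOptions = {"temperature": 0.9}  # more varied output; top_p stays omitted
focused: ChatOptions = {"top_p": 0.5}         # nucleus sampling; temperature stays omitted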
@@ -1910,25 +1910,25 @@ def _send_chat_request(
         f"prepared messages – {prepared_messages}, " +
         f"request_data – {request_data}"
     )
-    tools = request_data.get('tools', NotGiven())
-    tool_choice: Union[ToolChoice, NotGiven]
-    if isinstance(tools, NotGiven):
-        tool_choice = NotGiven()
+    tools = request_data.get('tools', Omit())
+    tool_choice: Union[ToolChoice, Omit]
+    if isinstance(tools, Omit):
+        tool_choice = Omit()
     else:
         tool_choice = request_data.get('tool_choice', cast(ToolChoice, 'auto'))
     return client.chat.chat(
         messages=prepared_messages,
         model=request_model,
         stream=stream,
-        logprobs=request_data.get('logprobs', NotGiven()),
+        logprobs=request_data.get('logprobs', Omit()),
         tools=tools,
         tool_choice=tool_choice,
-        response_format=request_data.get('response_format', NotGiven()),
-        max_tokens=request_data.get('max_tokens', NotGiven()),
-        n=request_data.get('n', NotGiven()),
-        stop=request_data.get('stop', NotGiven()),
-        temperature=request_data.get('temperature', NotGiven()),
-        top_p=request_data.get('top_p', NotGiven()),
+        response_format=request_data.get('response_format', Omit()),
+        max_tokens=request_data.get('max_tokens', Omit()),
+        n=request_data.get('n', Omit()),
+        stop=request_data.get('stop', Omit()),
+        temperature=request_data.get('temperature', Omit()),
+        top_p=request_data.get('top_p', Omit()),
         extra_headers=request_data.get('extra_headers'),
         extra_query=request_data.get('extra_query'),
         extra_body=request_data.get('extra_body'),
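The tool_choice handling above encodes a dependency between two optional fields: tool_choice is only meaningful when tools is present. A standalone restatement of that logic (names mirror the diff; this sketch is not the module's code):

from writerai._types import Omit

def resolve_tool_choice(request_data: dict):
    tools = request_data.get("tools", Omit())
    if isinstance(tools, Omit):
        # No tools supplied: omit tool_choice too, otherwise the request
        # would name a choice that references nothing.
        return Omit()
    return request_data.get("tool_choice", "auto")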
@@ -2980,12 +2980,12 @@ def complete(
     response_data: Completion = client.completions.create(
         model=request_model,
         prompt=initial_text,
-        best_of=config.get("best_of", NotGiven()),
-        max_tokens=config.get("max_tokens", NotGiven()),
-        random_seed=config.get("random_seed", NotGiven()),
-        stop=config.get("stop", NotGiven()),
-        temperature=config.get("temperature", NotGiven()),
-        top_p=config.get("top_p", NotGiven()),
+        best_of=config.get("best_of", Omit()),
+        max_tokens=config.get("max_tokens", Omit()),
+        random_seed=config.get("random_seed", Omit()),
+        stop=config.get("stop", Omit()),
+        temperature=config.get("temperature", Omit()),
+        top_p=config.get("top_p", Omit()),
         extra_headers=config.get("extra_headers"),
         extra_body=config.get("extra_body"),
         extra_query=config.get("extra_query"),
@@ -3026,12 +3026,12 @@ def stream_complete(
         model=request_model,
         prompt=initial_text,
         stream=True,
-        best_of=config.get("best_of", NotGiven()),
-        max_tokens=config.get("max_tokens", NotGiven()),
-        random_seed=config.get("random_seed", NotGiven()),
-        stop=config.get("stop", NotGiven()),
-        temperature=config.get("temperature", NotGiven()),
-        top_p=config.get("top_p", NotGiven()),
+        best_of=config.get("best_of", Omit()),
+        max_tokens=config.get("max_tokens", Omit()),
+        random_seed=config.get("random_seed", Omit()),
+        stop=config.get("stop", Omit()),
+        temperature=config.get("temperature", Omit()),
+        top_p=config.get("top_p", Omit()),
         extra_headers=config.get("extra_headers"),
         extra_body=config.get("extra_body"),
         extra_query=config.get("extra_query"),
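complete and stream_complete share the same CreateOptions handling: any key absent from config becomes Omit() and is stripped from the request. A hedged usage sketch (signatures inferred from the diff; prompt and values are illustrative):

text = complete("Write a haiku about sentinels", {"max_tokens": 64})

for chunk in stream_complete("Once upon a time", {"temperature": 0.8}):
    print(chunk, end="", flush=True)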
