Skip to content

Commit d5d5d4c

Browse files
committed
default to None for all Optional fields explicitly
1 parent b354c24 commit d5d5d4c

File tree

6 files changed

+41
-41
lines changed

6 files changed

+41
-41
lines changed

packages/jupyter-ai-magics/jupyter_ai_magics/models/completion.py

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -21,12 +21,12 @@ class InlineCompletionRequest(BaseModel):
2121
# whether to stream the response (if supported by the model)
2222
stream: bool
2323
# path to the notebook or file for which the completions are generated
24-
path: Optional[str]
24+
path: Optional[str] = None
2525
# language inferred from the document mime type (if possible)
26-
language: Optional[str]
26+
language: Optional[str] = None
2727
# identifier of the cell for which the completions are generated if in a notebook
2828
# previous cells and following cells can be used to learn the wider context
29-
cell_id: Optional[str]
29+
cell_id: Optional[str] = None
3030

3131

3232
class InlineCompletionItem(BaseModel):
@@ -36,9 +36,9 @@ class InlineCompletionItem(BaseModel):
3636
"""
3737

3838
insertText: str
39-
filterText: Optional[str]
40-
isIncomplete: Optional[bool]
41-
token: Optional[str]
39+
filterText: Optional[str] = None
40+
isIncomplete: Optional[bool] = None
41+
token: Optional[str] = None
4242

4343

4444
class CompletionError(BaseModel):
@@ -59,7 +59,7 @@ class InlineCompletionReply(BaseModel):
5959
list: InlineCompletionList
6060
# number of the request for which we are replying
6161
reply_to: int
62-
error: Optional[CompletionError]
62+
error: Optional[CompletionError] = None
6363

6464

6565
class InlineCompletionStreamChunk(BaseModel):
@@ -69,7 +69,7 @@ class InlineCompletionStreamChunk(BaseModel):
6969
response: InlineCompletionItem
7070
reply_to: int
7171
done: bool
72-
error: Optional[CompletionError]
72+
error: Optional[CompletionError] = None
7373

7474

7575
__all__ = [

packages/jupyter-ai-magics/jupyter_ai_magics/parsers.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -46,23 +46,23 @@ class CellArgs(BaseModel):
4646
type: Literal["root"] = "root"
4747
model_id: str
4848
format: FORMAT_CHOICES_TYPE
49-
model_parameters: Optional[str]
49+
model_parameters: Optional[str] = None
5050
# The following parameters are required only for SageMaker models
51-
region_name: Optional[str]
52-
request_schema: Optional[str]
53-
response_path: Optional[str]
51+
region_name: Optional[str] = None
52+
request_schema: Optional[str] = None
53+
response_path: Optional[str] = None
5454

5555

5656
# Should match CellArgs
5757
class ErrorArgs(BaseModel):
5858
type: Literal["error"] = "error"
5959
model_id: str
6060
format: FORMAT_CHOICES_TYPE
61-
model_parameters: Optional[str]
61+
model_parameters: Optional[str] = None
6262
# The following parameters are required only for SageMaker models
63-
region_name: Optional[str]
64-
request_schema: Optional[str]
65-
response_path: Optional[str]
63+
region_name: Optional[str] = None
64+
request_schema: Optional[str] = None
65+
response_path: Optional[str] = None
6666

6767

6868
class HelpArgs(BaseModel):
@@ -75,7 +75,7 @@ class VersionArgs(BaseModel):
7575

7676
class ListArgs(BaseModel):
7777
type: Literal["list"] = "list"
78-
provider_id: Optional[str]
78+
provider_id: Optional[str] = None
7979

8080

8181
class RegisterArgs(BaseModel):

packages/jupyter-ai/jupyter_ai/chat_handlers/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ class HandlerRoutingType(BaseModel):
6868
class SlashCommandRoutingType(HandlerRoutingType):
6969
routing_method = "slash_command"
7070

71-
slash_id: Optional[str]
71+
slash_id: Optional[str] = None
7272
"""Slash ID for routing a chat command to this handler. Only one handler
7373
may declare a particular slash ID. Must contain only alphanumerics and
7474
underscores."""

packages/jupyter-ai/jupyter_ai/config_manager.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -98,9 +98,9 @@ class ConfigManager(Configurable):
9898
config=True,
9999
)
100100

101-
model_provider_id: Optional[str]
102-
embeddings_provider_id: Optional[str]
103-
completions_model_provider_id: Optional[str]
101+
model_provider_id: Optional[str] = None
102+
embeddings_provider_id: Optional[str] = None
103+
completions_model_provider_id: Optional[str] = None
104104

105105
def __init__(
106106
self,

packages/jupyter-ai/jupyter_ai/models.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ class CellWithErrorSelection(BaseModel):
3737
# the type of message used to chat with the agent
3838
class ChatRequest(BaseModel):
3939
prompt: str
40-
selection: Optional[Selection]
40+
selection: Optional[Selection] = None
4141

4242

4343
class StopRequest(BaseModel):
@@ -54,7 +54,7 @@ class StopRequest(BaseModel):
5454

5555
class ClearRequest(BaseModel):
5656
type: Literal["clear"] = "clear"
57-
target: Optional[str]
57+
target: Optional[str] = None
5858
"""
5959
Message ID of the HumanChatMessage to delete an exchange at.
6060
If not provided, this requests the backend to clear all messages.
@@ -67,8 +67,8 @@ class ChatUser(BaseModel):
6767
initials: str
6868
name: str
6969
display_name: str
70-
color: Optional[str]
71-
avatar_url: Optional[str]
70+
color: Optional[str] = None
71+
avatar_url: Optional[str] = None
7272

7373

7474
class ChatClient(ChatUser):
@@ -148,7 +148,7 @@ class HumanChatMessage(BaseModel):
148148
`prompt` and `selection`."""
149149
prompt: str
150150
"""The prompt typed into the chat input by the user."""
151-
selection: Optional[Selection]
151+
selection: Optional[Selection] = None
152152
"""The selection included with the prompt, if any."""
153153
client: ChatClient
154154

@@ -238,8 +238,8 @@ class IndexMetadata(BaseModel):
238238

239239

240240
class DescribeConfigResponse(BaseModel):
241-
model_provider_id: Optional[str]
242-
embeddings_provider_id: Optional[str]
241+
model_provider_id: Optional[str] = None
242+
embeddings_provider_id: Optional[str] = None
243243
send_with_shift_enter: bool
244244
fields: Dict[str, Dict[str, Any]]
245245
# when sending config over REST API, do not include values of the API keys,
@@ -248,7 +248,7 @@ class DescribeConfigResponse(BaseModel):
248248
# timestamp indicating when the configuration file was last read. should be
249249
# passed to the subsequent UpdateConfig request.
250250
last_read: int
251-
completions_model_provider_id: Optional[str]
251+
completions_model_provider_id: Optional[str] = None
252252
completions_fields: Dict[str, Dict[str, Any]]
253253

254254

@@ -258,16 +258,16 @@ def forbid_none(cls, v):
258258

259259

260260
class UpdateConfigRequest(BaseModel):
261-
model_provider_id: Optional[str]
262-
embeddings_provider_id: Optional[str]
263-
send_with_shift_enter: Optional[bool]
264-
api_keys: Optional[Dict[str, str]]
265-
fields: Optional[Dict[str, Dict[str, Any]]]
261+
model_provider_id: Optional[str] = None
262+
embeddings_provider_id: Optional[str] = None
263+
send_with_shift_enter: Optional[bool] = None
264+
api_keys: Optional[Dict[str, str]] = None
265+
fields: Optional[Dict[str, Dict[str, Any]]] = None
266266
# if passed, this will raise an Error if the config was written to after the
267267
# time specified by `last_read` to prevent write-write conflicts.
268-
last_read: Optional[int]
269-
completions_model_provider_id: Optional[str]
270-
completions_fields: Optional[Dict[str, Dict[str, Any]]]
268+
last_read: Optional[int] = None
269+
completions_model_provider_id: Optional[str] = None
270+
completions_fields: Optional[Dict[str, Dict[str, Any]]] = None
271271

272272
_validate_send_wse = validator("send_with_shift_enter", allow_reuse=True)(
273273
forbid_none
@@ -280,12 +280,12 @@ class GlobalConfig(BaseModel):
280280
"""Model used to represent the config by ConfigManager. This is exclusive to
281281
the backend and should never be sent to the client."""
282282

283-
model_provider_id: Optional[str]
284-
embeddings_provider_id: Optional[str]
283+
model_provider_id: Optional[str] = None
284+
embeddings_provider_id: Optional[str] = None
285285
send_with_shift_enter: bool
286286
fields: Dict[str, Dict[str, Any]]
287287
api_keys: Dict[str, str]
288-
completions_model_provider_id: Optional[str]
288+
completions_model_provider_id: Optional[str] = None
289289
completions_fields: Dict[str, Dict[str, Any]]
290290

291291

packages/jupyter-ai/jupyter_ai/tests/test_handlers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ class MockProvider(BaseProvider, FakeListLLM):
3434
name = "My Provider"
3535
model_id_key = "model"
3636
models = ["model"]
37-
should_raise: Optional[bool]
37+
should_raise: Optional[bool] = None
3838

3939
def __init__(self, **kwargs):
4040
if "responses" not in kwargs:

0 commit comments

Comments
 (0)