Commit 5716f5e

replace .dict() with .model_dump().
`BaseModel.dict()` is deprecated in favor of `BaseModel.model_dump()` in Pydantic v2.
1 parent: 4a0bcb1 · commit: 5716f5e
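
For readers doing the same migration in their own code, here is a minimal sketch of the rename this commit performs, assuming Pydantic v2 is installed. The `Greeting` model is illustrative and not part of the repository.

```python
# Minimal sketch of the Pydantic v1 -> v2 rename; `Greeting` is illustrative,
# not a model from jupyter-ai.
from typing import Optional

from pydantic import BaseModel


class Greeting(BaseModel):
    text: str
    author: Optional[str] = None


g = Greeting(text="hello")

# Pydantic v1 style: g.dict() -- still callable in v2, but it emits a
# DeprecationWarning. The v2 replacement accepts the same filtering keywords:
print(g.model_dump())                    # {'text': 'hello', 'author': None}
print(g.model_dump(exclude_unset=True))  # {'text': 'hello'}
```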

File tree

6 files changed: 14 additions, 14 deletions

packages/jupyter-ai-magics/jupyter_ai_magics/magics.py

Lines changed: 1 addition & 1 deletion

@@ -442,7 +442,7 @@ def handle_error(self, args: ErrorArgs):
 
         prompt = f"Explain the following error:\n\n{last_error}"
         # Set CellArgs based on ErrorArgs
-        values = args.dict()
+        values = args.model_dump()
         values["type"] = "root"
         cell_args = CellArgs(**values)
 

packages/jupyter-ai/jupyter_ai/callback_handlers/metadata.py

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ def requires_no_arguments(func):
 def convert_to_serializable(obj):
     """Convert an object to a JSON serializable format"""
     if hasattr(obj, "dict") and callable(obj.dict) and requires_no_arguments(obj.dict):
-        return obj.dict()
+        return obj.model_dump()
     if hasattr(obj, "__dict__"):
         return obj.__dict__
     return str(obj)

packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py

Lines changed: 2 additions & 2 deletions

@@ -13,9 +13,9 @@
 from langchain.chains import LLMChain
 from langchain.llms import BaseLLM
 from langchain.output_parsers import PydanticOutputParser
-from pydantic import BaseModel
 from langchain.schema.output_parser import BaseOutputParser
 from langchain_core.prompts import PromptTemplate
+from pydantic import BaseModel
 
 
 class OutlineSection(BaseModel):

@@ -55,7 +55,7 @@ async def generate_outline(description, llm=None, verbose=False):
     chain = NotebookOutlineChain.from_llm(llm=llm, parser=parser, verbose=verbose)
     outline = await chain.apredict(description=description)
     outline = parser.parse(outline)
-    return outline.dict()
+    return outline.model_dump()
 
 
 class CodeImproverChain(LLMChain):
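
For orientation, a hedged sketch of the flow this hunk touches in `generate_outline`: an LLM response is parsed into a Pydantic model by `PydanticOutputParser`, then returned as a plain dict via `model_dump()`. The `Outline` model and the hard-coded JSON string below are assumptions for illustration, not code from the repository; in the real function the text comes from `chain.apredict(...)`.

```python
# Hedged sketch of parse-then-dump; `Outline` and the literal JSON string
# are illustrative assumptions, not jupyter-ai code.
from typing import List

from langchain.output_parsers import PydanticOutputParser
from pydantic import BaseModel


class Outline(BaseModel):
    sections: List[str]


parser = PydanticOutputParser(pydantic_object=Outline)

# Stand-in for the model output that chain.apredict(...) would produce.
llm_output = '{"sections": ["Load data", "Train model", "Evaluate"]}'
outline = parser.parse(llm_output)

# Same final step as generate_outline(): a plain dict, Pydantic v2 style.
print(outline.model_dump())  # {'sections': ['Load data', 'Train model', 'Evaluate']}
```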

packages/jupyter-ai/jupyter_ai/completions/handlers/base.py

Lines changed: 1 addition & 1 deletion

@@ -61,7 +61,7 @@ def loop(self) -> AbstractEventLoop:
 
     def reply(self, reply: Union[InlineCompletionReply, InlineCompletionStreamChunk]):
         """Write a reply object to the WebSocket connection."""
-        message = reply.dict()
+        message = reply.model_dump()
         super().write_message(message)
 
     def initialize(self):

packages/jupyter-ai/jupyter_ai/config_manager.py

Lines changed: 6 additions & 6 deletions

@@ -263,7 +263,7 @@ def _validate_config(self, config: GlobalConfig):
         read and before every write to the config file. Guarantees that the
         config file conforms to the JSON Schema, and that the language and
         embedding models have authn credentials if specified."""
-        self.validator.validate(config.dict())
+        self.validator.validate(config.model_dump())
 
         # validate language model config
         if config.model_provider_id:

@@ -352,10 +352,10 @@ def _write_config(self, new_config: GlobalConfig):
 
         self._validate_config(new_config)
         with open(self.config_path, "w") as f:
-            json.dump(new_config.dict(), f, indent=self.indentation_depth)
+            json.dump(new_config.model_dump(), f, indent=self.indentation_depth)
 
     def delete_api_key(self, key_name: str):
-        config_dict = self._read_config().dict()
+        config_dict = self._read_config().model_dump()
         required_keys = []
         for provider in [
             self.lm_provider,

@@ -389,15 +389,15 @@ def update_config(self, config_update: UpdateConfigRequest): # type:ignore
                 if not api_key_value:
                     raise KeyEmptyError("API key value cannot be empty.")
 
-        config_dict = self._read_config().dict()
-        Merger.merge(config_dict, config_update.dict(exclude_unset=True))
+        config_dict = self._read_config().model_dump()
+        Merger.merge(config_dict, config_update.model_dump(exclude_unset=True))
         self._write_config(GlobalConfig(**config_dict))
 
     # this cannot be a property, as the parent Configurable already defines the
     # self.config attr.
     def get_config(self):
         config = self._read_config()
-        config_dict = config.dict(exclude_unset=True)
+        config_dict = config.model_dump(exclude_unset=True)
         api_key_names = list(config_dict.pop("api_keys").keys())
         return DescribeConfigResponse(
             **config_dict, api_keys=api_key_names, last_read=self._last_read
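
The `update_config` hunk above keeps `exclude_unset=True`, which is what makes partial updates safe: only the fields the client explicitly set are merged over the stored config. A small self-contained sketch of that behavior follows; the `Settings` model and its fields are made up for illustration, whereas the real code merges `GlobalConfig` and `UpdateConfigRequest` via `Merger.merge`.

```python
# Hedged sketch of why exclude_unset=True matters for partial updates.
# `Settings` and its fields are invented; jupyter-ai uses GlobalConfig and
# UpdateConfigRequest instead.
from typing import Optional

from pydantic import BaseModel


class Settings(BaseModel):
    model_provider_id: Optional[str] = None
    send_with_shift_enter: bool = True


stored = Settings(model_provider_id="openai-chat:gpt-4", send_with_shift_enter=False)
update = Settings(model_provider_id="anthropic-chat:claude-3")

# Dumping everything would silently reset send_with_shift_enter to its default:
print(update.model_dump())
# {'model_provider_id': 'anthropic-chat:claude-3', 'send_with_shift_enter': True}

# Dumping only explicitly-set fields keeps the merge surgical:
patch = update.model_dump(exclude_unset=True)
print(patch)  # {'model_provider_id': 'anthropic-chat:claude-3'}

merged = {**stored.model_dump(), **patch}
print(Settings(**merged))
# model_provider_id='anthropic-chat:claude-3' send_with_shift_enter=False
```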

packages/jupyter-ai/jupyter_ai/handlers.py

Lines changed: 3 additions & 3 deletions

@@ -200,7 +200,7 @@ def open(self):
         """Handles opening of a WebSocket connection. Client ID can be retrieved
         from `self.client_id`."""
 
-        current_user = self.get_chat_user().dict()
+        current_user = self.get_chat_user().model_dump()
         client_id = self.generate_client_id()
 
         self.root_chat_handlers[client_id] = self

@@ -212,7 +212,7 @@ def open(self):
                 history=ChatHistory(
                     messages=self.chat_history, pending_messages=self.pending_messages
                 ),
-            ).dict()
+            ).model_dump()
         )
 
         self.log.info(f"Client connected. ID: {client_id}")

@@ -238,7 +238,7 @@ def broadcast_message(self, message: Message):
         for client_id in client_ids:
             client = self.root_chat_handlers[client_id]
             if client:
-                client.write_message(message.dict())
+                client.write_message(message.model_dump())
 
         # append all messages of type `ChatMessage` directly to the chat history
         if isinstance(
