
Commit fd3744a

replace .dict() with .model_dump().
`BaseModel.dict()` is deprecated in favor of `BaseModel.model_dump()` in Pydantic v2.
1 parent fb18ad8 commit fd3744a
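
For reference, a minimal sketch of the API change this commit applies across the codebase; the `Point` model is a hypothetical example, not taken from this repository:

    from pydantic import BaseModel

    class Point(BaseModel):
        x: int = 0
        y: int = 0

    p = Point(x=1, y=2)
    p.dict()        # Pydantic v1 style; still works under v2 but emits a deprecation warning
    p.model_dump()  # Pydantic v2 replacement; returns {'x': 1, 'y': 2}

`model_dump()` accepts the same filtering keyword arguments (`exclude_unset`, `exclude_none`, `by_alias`, ...), so most call sites migrate by renaming the method, as the hunks below do.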

6 files changed: +14 −14 lines changed

packages/jupyter-ai-magics/jupyter_ai_magics/magics.py

Lines changed: 1 addition & 1 deletion
@@ -442,7 +442,7 @@ def handle_error(self, args: ErrorArgs):
 
         prompt = f"Explain the following error:\n\n{last_error}"
         # Set CellArgs based on ErrorArgs
-        values = args.dict()
+        values = args.model_dump()
         values["type"] = "root"
         cell_args = CellArgs(**values)
 

packages/jupyter-ai/jupyter_ai/callback_handlers/metadata.py

Lines changed: 1 addition & 1 deletion
@@ -20,7 +20,7 @@ def requires_no_arguments(func):
 def convert_to_serializable(obj):
     """Convert an object to a JSON serializable format"""
     if hasattr(obj, "dict") and callable(obj.dict) and requires_no_arguments(obj.dict):
-        return obj.dict()
+        return obj.model_dump()
     if hasattr(obj, "__dict__"):
         return obj.__dict__
     return str(obj)
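
A hedged illustration of the updated helper; `Person` and the other sample objects are hypothetical and only show which branch each input takes:

    from types import SimpleNamespace
    from pydantic import BaseModel

    class Person(BaseModel):
        name: str

    convert_to_serializable(Person(name="Ada"))           # -> {'name': 'Ada'}, via model_dump()
    convert_to_serializable(SimpleNamespace(name="Ada"))  # -> {'name': 'Ada'}, via __dict__
    convert_to_serializable(object())                     # -> '<object object at 0x...>', via str()

The `hasattr(obj, "dict")` guard still matches Pydantic v2 models (which keep a deprecated `.dict` method); only the call made on the matched object changes.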

packages/jupyter-ai/jupyter_ai/chat_handlers/generate.py

Lines changed: 2 additions & 2 deletions
@@ -13,9 +13,9 @@
 from langchain.chains import LLMChain
 from langchain.llms import BaseLLM
 from langchain.output_parsers import PydanticOutputParser
-from pydantic import BaseModel
 from langchain.schema.output_parser import BaseOutputParser
 from langchain_core.prompts import PromptTemplate
+from pydantic import BaseModel
 
 
 class OutlineSection(BaseModel):
@@ -55,7 +55,7 @@ async def generate_outline(description, llm=None, verbose=False):
     chain = NotebookOutlineChain.from_llm(llm=llm, parser=parser, verbose=verbose)
     outline = await chain.apredict(description=description)
     outline = parser.parse(outline)
-    return outline.dict()
+    return outline.model_dump()
 
 
 class CodeImproverChain(LLMChain):

packages/jupyter-ai/jupyter_ai/completions/handlers/base.py

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ def loop(self) -> AbstractEventLoop:
 
     def reply(self, reply: Union[InlineCompletionReply, InlineCompletionStreamChunk]):
         """Write a reply object to the WebSocket connection."""
-        message = reply.dict()
+        message = reply.model_dump()
         super().write_message(message)
 
     def initialize(self):

packages/jupyter-ai/jupyter_ai/config_manager.py

Lines changed: 6 additions & 6 deletions
@@ -263,7 +263,7 @@ def _validate_config(self, config: GlobalConfig):
         read and before every write to the config file. Guarantees that the
         config file conforms to the JSON Schema, and that the language and
         embedding models have authn credentials if specified."""
-        self.validator.validate(config.dict())
+        self.validator.validate(config.model_dump())
 
         # validate language model config
         if config.model_provider_id:
@@ -349,10 +349,10 @@ def _write_config(self, new_config: GlobalConfig):
 
         self._validate_config(new_config)
         with open(self.config_path, "w") as f:
-            json.dump(new_config.dict(), f, indent=self.indentation_depth)
+            json.dump(new_config.model_dump(), f, indent=self.indentation_depth)
 
     def delete_api_key(self, key_name: str):
-        config_dict = self._read_config().dict()
+        config_dict = self._read_config().model_dump()
         required_keys = []
         for provider in [
             self.lm_provider,
@@ -386,15 +386,15 @@ def update_config(self, config_update: UpdateConfigRequest): # type:ignore
                 if not api_key_value:
                     raise KeyEmptyError("API key value cannot be empty.")
 
-        config_dict = self._read_config().dict()
-        Merger.merge(config_dict, config_update.dict(exclude_unset=True))
+        config_dict = self._read_config().model_dump()
+        Merger.merge(config_dict, config_update.model_dump(exclude_unset=True))
         self._write_config(GlobalConfig(**config_dict))
 
     # this cannot be a property, as the parent Configurable already defines the
     # self.config attr.
     def get_config(self):
         config = self._read_config()
-        config_dict = config.dict(exclude_unset=True)
+        config_dict = config.model_dump(exclude_unset=True)
         api_key_names = list(config_dict.pop("api_keys").keys())
         return DescribeConfigResponse(
             **config_dict, api_keys=api_key_names, last_read=self._last_read
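
A brief sketch of why `exclude_unset=True` matters when merging a partial update in `update_config`; the request model below is simplified and hypothetical:

    from typing import Optional
    from pydantic import BaseModel

    class UpdateRequest(BaseModel):
        model_provider_id: Optional[str] = None
        send_with_shift_enter: Optional[bool] = None

    update = UpdateRequest(send_with_shift_enter=True)
    update.model_dump()                    # {'model_provider_id': None, 'send_with_shift_enter': True}
    update.model_dump(exclude_unset=True)  # {'send_with_shift_enter': True}

Only the fields the client actually sent are merged into the stored config; fields left unset keep their existing values rather than being clobbered with `None`.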

packages/jupyter-ai/jupyter_ai/handlers.py

Lines changed: 3 additions & 3 deletions
@@ -200,7 +200,7 @@ def open(self):
         """Handles opening of a WebSocket connection. Client ID can be retrieved
         from `self.client_id`."""
 
-        current_user = self.get_chat_user().dict()
+        current_user = self.get_chat_user().model_dump()
         client_id = self.generate_client_id()
 
         self.root_chat_handlers[client_id] = self
@@ -212,7 +212,7 @@ def open(self):
                 history=ChatHistory(
                     messages=self.chat_history, pending_messages=self.pending_messages
                 ),
-            ).dict()
+            ).model_dump()
         )
 
         self.log.info(f"Client connected. ID: {client_id}")
@@ -238,7 +238,7 @@ def broadcast_message(self, message: Message):
         for client_id in client_ids:
             client = self.root_chat_handlers[client_id]
             if client:
-                client.write_message(message.dict())
+                client.write_message(message.model_dump())
 
         # append all messages of type `ChatMessage` directly to the chat history
         if isinstance(
