Commit 5c78456

Fixed TypeError: 'NoneType' object is not subscriptable
1 parent: 17da3e9
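Context for the fix: inquirer.prompt() (from the python-inquirer library used in this file) can return None instead of an answers dict when the prompt is cancelled, for example with Ctrl+C, so indexing the result directly is what raised the TypeError. The following is a minimal sketch of the failure mode and the guard this commit adds; the question list here is illustrative, not the one in local.py:

    import inquirer

    questions = [
        inquirer.List("model", message="Select a model", choices=["llama3", "phi3"])
    ]
    answers = inquirer.prompt(questions)  # None if the prompt was cancelled (e.g. Ctrl+C)

    # Before this commit: answers["model"] on a cancelled prompt raised
    # TypeError: 'NoneType' object is not subscriptable
    if answers == None:
        exit()

    selected_model = answers["model"]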

1 file changed, 18 additions (+), 2 deletions (-)

interpreter/terminal_interface/profiles/defaults/local.py
@@ -136,6 +136,9 @@ def download_model(models_dir, models, interpreter):
         )
     ]
     answers = inquirer.prompt(questions)
+
+    if answers == None:
+        exit()
 
     # Get the selected model
     selected_model = next(
@@ -201,6 +204,8 @@ def download_model(models_dir, models, interpreter):
     ]
     answers = inquirer.prompt(questions)
 
+    if answers == None:
+        exit()
 
     selected_model = answers["model"]
 
@@ -266,7 +271,11 @@ def list_ollama_models():
         ),
     ]
     name_answer = inquirer.prompt(name_question)
-    selected_name = name_answer["name"] if name_answer else None
+
+    if name_answer == None:
+        exit()
+
+    selected_name = name_answer["name"]
 
     if selected_name == "llama3":
         # If the user selects llama3, we need to check if it's installed, and if not, install it
@@ -316,7 +325,11 @@ def list_ollama_models():
         ),
     ]
     model_name_answer = inquirer.prompt(model_name_question)
-    jan_model_name = model_name_answer["jan_model_name"] if model_name_answer else None
+
+    if model_name_answer == None:
+        exit()
+
+    jan_model_name = model_name_answer["jan_model_name"]
     interpreter.llm.model = f"jan/{jan_model_name}"
     interpreter.display_message(f"\nUsing Jan model: `{jan_model_name}` \n")
     time.sleep(1)
@@ -360,6 +373,9 @@ def list_ollama_models():
         )
     ]
     answers = inquirer.prompt(questions)
+
+    if answers == None:
+        exit()
 
     if answers["model"] == " ↓ Download new model":
         model_path = download_model(models_dir, models, interpreter)
