Skip to content

Commit a0af47e

Browse files
committed
add both model id and model info in conversation state
1 parent a92bb9c commit a0af47e

File tree

5 files changed

+16
-11
lines changed

5 files changed

+16
-11
lines changed

crates/chat-cli/src/cli/chat/cli/context.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,7 @@ impl ContextSubcommand {
222222
execute!(session.stderr, style::Print(format!("{}\n\n", "▔".repeat(3))),)?;
223223
}
224224

225-
let context_files_max_size = calc_max_context_files_size(session.conversation.model.as_ref());
225+
let context_files_max_size = calc_max_context_files_size(session.conversation.model_info.as_ref());
226226
let mut files_as_vec = profile_context_files
227227
.iter()
228228
.map(|(path, content, _)| (path.clone(), content.clone()))

crates/chat-cli/src/cli/chat/cli/model.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -154,7 +154,7 @@ pub async fn select_model(os: &Os, session: &mut ChatSession) -> Result<Option<C
154154
return Ok(None);
155155
}
156156

157-
let active_model_id = session.conversation.model.as_ref().map(|m| m.model_id.as_str());
157+
let active_model_id = session.conversation.model_info.as_ref().map(|m| m.model_id.as_str());
158158

159159
let labels: Vec<String> = models
160160
.iter()
@@ -190,7 +190,7 @@ pub async fn select_model(os: &Os, session: &mut ChatSession) -> Result<Option<C
190190

191191
if let Some(index) = selection {
192192
let selected = models[index].clone();
193-
session.conversation.model = Some(selected.clone());
193+
session.conversation.model_info = Some(selected.clone());
194194
let display_name = selected.display_name();
195195

196196
queue!(

crates/chat-cli/src/cli/chat/cli/usage.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ impl UsageArgs {
6262
// set a max width for the progress bar for better aesthetic
6363
let progress_bar_width = std::cmp::min(window_width, 80);
6464

65-
let context_window_size = context_window_tokens(session.conversation.model.as_ref());
65+
let context_window_size = context_window_tokens(session.conversation.model_info.as_ref());
6666
let context_width =
6767
((context_token_count.value() as f64 / context_window_size as f64) * progress_bar_width as f64) as usize;
6868
let assistant_width =

crates/chat-cli/src/cli/chat/conversation.rs

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -112,9 +112,13 @@ pub struct ConversationState {
112112
latest_summary: Option<(String, RequestMetadata)>,
113113
#[serde(skip)]
114114
pub agents: Agents,
115+
/// Legacy, unused
115116
/// Model explicitly selected by the user in this conversation state via `/model`.
116117
#[serde(default, skip_serializing_if = "Option::is_none")]
117-
pub model: Option<ModelInfo>,
118+
pub model: Option<String>,
119+
/// Model explicitly selected by the user in this conversation state via `/model`.
120+
#[serde(default, skip_serializing_if = "Option::is_none")]
121+
pub model_info: Option<ModelInfo>,
118122
/// Used to track agent vs user updates to file modifications.
119123
///
120124
/// Maps from a file path to [FileLineTracker]
@@ -173,7 +177,8 @@ impl ConversationState {
173177
context_message_length: None,
174178
latest_summary: None,
175179
agents,
176-
model,
180+
model: None,
181+
model_info: model,
177182
file_line_tracker: HashMap::new(),
178183
}
179184
}
@@ -457,7 +462,7 @@ impl ConversationState {
457462
context_messages,
458463
dropped_context_files,
459464
tools: &self.tools,
460-
model_id: self.model.as_ref().map(|m| m.model_id.as_str()),
465+
model_id: self.model_info.as_ref().map(|m| m.model_id.as_str()),
461466
})
462467
}
463468

@@ -555,7 +560,7 @@ impl ConversationState {
555560
conversation_id: Some(self.conversation_id.clone()),
556561
user_input_message: summary_message
557562
.unwrap_or(UserMessage::new_prompt(summary_content, None)) // should not happen
558-
.into_user_input_message(self.model.as_ref().map(|m| m.model_id.clone()), &tools),
563+
.into_user_input_message(self.model_info.as_ref().map(|m| m.model_id.clone()), &tools),
559564
history: Some(flatten_history(history.iter())),
560565
})
561566
}
@@ -668,7 +673,7 @@ impl ConversationState {
668673
/// Get the current token warning level
669674
pub async fn get_token_warning_level(&mut self, os: &Os) -> Result<TokenWarningLevel, ChatError> {
670675
let total_chars = self.calculate_char_count(os).await?;
671-
let max_chars = TokenCounter::token_to_chars(context_window_tokens(self.model.as_ref()));
676+
let max_chars = TokenCounter::token_to_chars(context_window_tokens(self.model_info.as_ref()));
672677

673678
Ok(if *total_chars >= max_chars {
674679
TokenWarningLevel::Critical

crates/chat-cli/src/cli/chat/mod.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1184,7 +1184,7 @@ impl ChatSession {
11841184
}
11851185
self.stderr.flush()?;
11861186

1187-
if let Some(ref model_info) = self.conversation.model {
1187+
if let Some(ref model_info) = self.conversation.model_info {
11881188
let (models, _default_model) = get_available_models(os).await?;
11891189
if let Some(model_option) = models.iter().find(|option| option.model_id == model_info.model_id) {
11901190
let display_name = model_option.model_name.as_deref().unwrap_or(&model_option.model_id);
@@ -2377,7 +2377,7 @@ impl ChatSession {
23772377
let mut tool_telemetry = ToolUseEventBuilder::new(
23782378
conv_id.clone(),
23792379
tool_use.id.clone(),
2380-
self.conversation.model.as_ref().map(|m| m.model_id.clone()),
2380+
self.conversation.model_info.as_ref().map(|m| m.model_id.clone()),
23812381
)
23822382
.set_tool_use_id(tool_use_id.clone())
23832383
.set_tool_name(tool_use.name.clone())

0 commit comments

Comments
 (0)