Skip to content

Commit 3997a42

Browse files
committed
Fix eager compaction
The result events include subagent and other token counts that we are not interested in, so read usage from the last assistant message instead
1 parent 04695b1 commit 3997a42

File tree

2 files changed

+22
-18
lines changed

2 files changed

+22
-18
lines changed

apps/desktop/src/lib/codegen/messages.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -334,6 +334,7 @@ export function usageStats(events: ClaudeMessage[]): {
334334
if (lastAssistantMessage) {
335335
const usage = lastAssistantMessage.message.usage;
336336
tokens += usage.cache_read_input_tokens ?? 0;
337+
tokens += usage.cache_creation_input_tokens ?? 0;
337338
tokens += usage.input_tokens;
338339
tokens += usage.output_tokens;
339340
const modelPricing = findModelPricing(lastAssistantMessage.message.model);

crates/but-claude/src/compact.rs

Lines changed: 21 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@
1212
//! of the conversation so far and then start a new session where the first
1313
//! message contains the summary
1414
15-
use std::{collections::HashMap, sync::Arc};
15+
use std::sync::Arc;
1616

1717
use anyhow::{Context, Result};
1818
use but_broadcaster::Broadcaster;
@@ -34,11 +34,11 @@ use crate::{
3434
};
3535

3636
#[derive(Deserialize, Debug, Clone)]
37-
#[serde(rename_all = "camelCase")]
3837
struct ModelUsage {
3938
input_tokens: u32,
4039
output_tokens: u32,
4140
cache_read_input_tokens: Option<u32>,
41+
cache_creation_input_tokens: Option<u32>,
4242
}
4343

4444
#[derive(Debug)]
@@ -189,7 +189,7 @@ impl Claudes {
189189
// Find the last result message
190190
let Some(output) = messages.into_iter().rev().find_map(|m| match m.content {
191191
ClaudeMessageContent::ClaudeOutput(o) => {
192-
if o["type"].as_str() == Some("result") {
192+
if o["type"].as_str() == Some("assistant") {
193193
Some(o)
194194
} else {
195195
None
@@ -200,21 +200,24 @@ impl Claudes {
200200
return Ok(());
201201
};
202202

203-
let usage: HashMap<String, ModelUsage> =
204-
serde_json::from_value(output["modelUsage"].clone())?;
205-
206-
for (name, usage) in usage {
207-
if let Some(model) = find_model(name) {
208-
let total = usage.cache_read_input_tokens.unwrap_or(0)
209-
+ usage.input_tokens
210-
+ usage.output_tokens;
211-
if total > (model.context - COMPACTION_BUFFER) {
212-
self.compact(ctx.clone(), broadcaster.clone(), stack_id)
213-
.await;
214-
break;
215-
}
216-
};
217-
}
203+
let model_name = output["message"]["model"]
204+
.as_str()
205+
.context("could not find model property")?;
206+
dbg!(&output);
207+
208+
if let Some(model) = find_model(model_name.to_owned()) {
209+
let usage: ModelUsage = serde_json::from_value(output["message"]["usage"].clone())?;
210+
211+
let total = usage.cache_read_input_tokens.unwrap_or(0)
212+
+ usage.cache_creation_input_tokens.unwrap_or(0)
213+
+ usage.input_tokens
214+
+ usage.output_tokens;
215+
dbg!(total, model.context - COMPACTION_BUFFER);
216+
if total > (model.context - COMPACTION_BUFFER) {
217+
self.compact(ctx.clone(), broadcaster.clone(), stack_id)
218+
.await;
219+
}
220+
};
218221

219222
Ok(())
220223
}

0 commit comments

Comments (0)