Skip to content

Commit 002355f

Browse files
committed
feat: enhance conversation truncation logic to preserve dialog integrity and handle large message counts
1 parent eb74f02 commit 002355f

File tree

3 files changed

+347
-47
lines changed

3 files changed

+347
-47
lines changed

src/core/Cline.ts

Lines changed: 203 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,10 @@ export class Cline extends EventEmitter<ClineEvents> {
122122
// Subtasks
123123
readonly rootTask: Cline | undefined = undefined
124124
readonly parentTask: Cline | undefined = undefined
125+
126+
// Debounce timers used to coalesce frequent history saves
127+
private _saveApiConversationHistoryTimeout?: NodeJS.Timeout
128+
private _saveClineMessagesTimeout?: NodeJS.Timeout
125129
readonly taskNumber: number
126130
private isPaused: boolean = false
127131
private pausedModeSlug: string = defaultModeSlug
@@ -313,8 +317,85 @@ export class Cline extends EventEmitter<ClineEvents> {
313317

314318
private async saveApiConversationHistory() {
315319
try {
316-
const filePath = path.join(await this.ensureTaskDirectoryExists(), GlobalFileNames.apiConversationHistory)
317-
await fs.writeFile(filePath, JSON.stringify(this.apiConversationHistory))
320+
// Debounce to prevent overly frequent history saves
321+
if (this._saveApiConversationHistoryTimeout) {
322+
clearTimeout(this._saveApiConversationHistoryTimeout)
323+
}
324+
325+
this._saveApiConversationHistoryTimeout = setTimeout(async () => {
326+
const filePath = path.join(
327+
await this.ensureTaskDirectoryExists(),
328+
GlobalFileNames.apiConversationHistory,
329+
)
330+
331+
// Check the history length, log and truncate
332+
if (this.apiConversationHistory.length > 50) {
333+
console.log(
334+
`Long API conversation history detected: ${this.apiConversationHistory.length} messages, performing truncation before saving`,
335+
)
336+
337+
// Record memory usage
338+
try {
339+
const memoryUsage = process.memoryUsage()
340+
console.log(
341+
`Memory usage before truncation: RSS=${Math.round(memoryUsage.rss / 1024 / 1024)}MB, Heap=${Math.round(memoryUsage.heapUsed / 1024 / 1024)}/${Math.round(memoryUsage.heapTotal / 1024 / 1024)}MB`,
342+
)
343+
} catch (err) {
344+
// Ignore the error, this is just diagnostic information
345+
}
346+
347+
// Retain the earliest system messages, user instructions, and some recent messages
348+
const keepFirst = 5
349+
const keepLast = 50
350+
351+
if (this.apiConversationHistory.length > keepFirst + keepLast) {
352+
const firstPart = this.apiConversationHistory.slice(0, keepFirst)
353+
const lastPart = this.apiConversationHistory.slice(-keepLast)
354+
const removedCount = this.apiConversationHistory.length - (firstPart.length + lastPart.length)
355+
console.log(
356+
`Truncating API conversation history: removed ${removedCount} messages, keeping ${firstPart.length} first and ${lastPart.length} last messages`,
357+
)
358+
this.apiConversationHistory = [...firstPart, ...lastPart]
359+
360+
// Enforce garbage collection
361+
if (typeof global.gc === "function") {
362+
try {
363+
global.gc()
364+
// The garbage collection is performed again after a delay of 100ms to ensure that the memory is completely released
365+
setTimeout(() => {
366+
if (typeof global.gc === "function") {
367+
try {
368+
global.gc()
369+
console.log(
370+
"Second manual garbage collection triggered to ensure memory release",
371+
)
372+
} catch (err) {
373+
// Ignore the error, this is just diagnostic information
374+
}
375+
}
376+
}, 100)
377+
console.log("Manual garbage collection triggered after history truncation")
378+
} catch (err) {
379+
// Ignore the error, this is just diagnostic information
380+
}
381+
}
382+
383+
// Record memory usage
384+
try {
385+
const memoryUsage = process.memoryUsage()
386+
console.log(
387+
`Memory usage after truncation: RSS=${Math.round(memoryUsage.rss / 1024 / 1024)}MB, Heap=${Math.round(memoryUsage.heapUsed / 1024 / 1024)}/${Math.round(memoryUsage.heapTotal / 1024 / 1024)}MB`,
388+
)
389+
} catch (err) {
390+
// Ignore the error, this is just diagnostic information
391+
}
392+
}
393+
}
394+
395+
await fs.writeFile(filePath, JSON.stringify(this.apiConversationHistory))
396+
397+
this._saveApiConversationHistoryTimeout = undefined
398+
}, 100) // Further reduce debounce delay to 100ms, speeding up saves and reducing delay risks
318399
} catch (error) {
319400
// in the off chance this fails, we don't want to stop the task
320401
console.error("Failed to save API conversation history:", error)
@@ -362,42 +443,131 @@ export class Cline extends EventEmitter<ClineEvents> {
362443

363444
private async saveClineMessages() {
364445
try {
365-
const taskDir = await this.ensureTaskDirectoryExists()
366-
const filePath = path.join(taskDir, GlobalFileNames.uiMessages)
367-
await fs.writeFile(filePath, JSON.stringify(this.clineMessages))
368-
// combined as they are in ChatView
369-
const apiMetrics = this.getTokenUsage()
370-
const taskMessage = this.clineMessages[0] // first message is always the task say
371-
const lastRelevantMessage =
372-
this.clineMessages[
373-
findLastIndex(
374-
this.clineMessages,
375-
(m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
446+
// Debounce to prevent overly frequent saving of message history
447+
if (this._saveClineMessagesTimeout) {
448+
clearTimeout(this._saveClineMessagesTimeout)
449+
}
450+
451+
this._saveClineMessagesTimeout = setTimeout(async () => {
452+
const taskDir = await this.ensureTaskDirectoryExists()
453+
const filePath = path.join(taskDir, GlobalFileNames.uiMessages)
454+
455+
// Check the message history length and perform a more aggressive truncation if it is too long
456+
if (this.clineMessages.length > 50) {
457+
console.log(
458+
`Long UI message history detected: ${this.clineMessages.length} messages, trimming messages before saving`,
376459
)
377-
]
378460

379-
let taskDirSize = 0
461+
// Record memory usage before truncation
462+
try {
463+
const memoryUsage = process.memoryUsage()
464+
console.log(
465+
`Memory usage before UI message truncation: RSS=${Math.round(memoryUsage.rss / 1024 / 1024)}MB, Heap=${Math.round(memoryUsage.heapUsed / 1024 / 1024)}/${Math.round(memoryUsage.heapTotal / 1024 / 1024)}MB`,
466+
)
467+
} catch (err) {
468+
// Ignore error
469+
}
380470

381-
try {
382-
taskDirSize = await getFolderSize.loose(taskDir)
383-
} catch (err) {
384-
console.error(
385-
`[saveClineMessages] failed to get task directory size (${taskDir}): ${err instanceof Error ? err.message : String(err)}`,
471+
// Keep the earliest user commands and some recent messages
472+
const keepFirst = 3
473+
const keepLast = 40
474+
475+
if (this.clineMessages.length > keepFirst + keepLast) {
476+
// Pre-truncation backups are no longer created to reduce memory usage
477+
// const fullMessages = [...this.clineMessages];
478+
479+
const firstPart = this.clineMessages.slice(0, keepFirst)
480+
const lastPart = this.clineMessages.slice(-keepLast)
481+
const removedCount = this.clineMessages.length - (firstPart.length + lastPart.length)
482+
console.log(
483+
`Truncating UI message history: removed ${removedCount} messages, keeping ${firstPart.length} first and ${lastPart.length} last messages`,
484+
)
485+
486+
// Update an array in memory
487+
this.clineMessages = [...firstPart, ...lastPart]
488+
489+
// Notify the front end to update the UI to prevent UI state from being inconsistent with the back end
490+
this.providerRef
491+
.deref()
492+
?.postStateToWebview()
493+
.catch((err) => {
494+
console.error("Failed to update webview after message truncation:", err)
495+
})
496+
497+
// Force garbage collection to ensure that memory is freed
498+
if (typeof global.gc === "function") {
499+
try {
500+
global.gc()
501+
// The garbage collection is performed again after a delay of 100ms to ensure that the memory is completely released
502+
setTimeout(() => {
503+
if (typeof global.gc === "function") {
504+
try {
505+
global.gc()
506+
console.log(
507+
"Second manual garbage collection triggered to ensure memory release",
508+
)
509+
} catch (err) {
510+
// Ignore error
511+
}
512+
}
513+
}, 100)
514+
console.log("Manual garbage collection triggered after UI message truncation")
515+
} catch (err) {
516+
// Ignore error
517+
}
518+
}
519+
520+
// Record memory usage after truncation
521+
try {
522+
const memoryUsage = process.memoryUsage()
523+
console.log(
524+
`Memory usage after UI message truncation: RSS=${Math.round(memoryUsage.rss / 1024 / 1024)}MB, Heap=${Math.round(memoryUsage.heapUsed / 1024 / 1024)}/${Math.round(memoryUsage.heapTotal / 1024 / 1024)}MB`,
525+
)
526+
} catch (err) {
527+
// Ignore error
528+
}
529+
}
530+
}
531+
532+
await fs.writeFile(filePath, JSON.stringify(this.clineMessages))
533+
// combined as they are in ChatView
534+
const apiMetrics = getApiMetrics(
535+
combineApiRequests(combineCommandSequences(this.clineMessages.slice(1))),
386536
)
387-
}
537+
const taskMessage = this.clineMessages[0] // first message is always the task say
538+
const lastRelevantMessage =
539+
this.clineMessages[
540+
findLastIndex(
541+
this.clineMessages,
542+
(m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
543+
)
544+
]
388545

389-
await this.providerRef.deref()?.updateTaskHistory({
390-
id: this.taskId,
391-
number: this.taskNumber,
392-
ts: lastRelevantMessage.ts,
393-
task: taskMessage.text ?? "",
394-
tokensIn: apiMetrics.totalTokensIn,
395-
tokensOut: apiMetrics.totalTokensOut,
396-
cacheWrites: apiMetrics.totalCacheWrites,
397-
cacheReads: apiMetrics.totalCacheReads,
398-
totalCost: apiMetrics.totalCost,
399-
size: taskDirSize,
400-
})
546+
let taskDirSize = 0
547+
548+
try {
549+
taskDirSize = await getFolderSize.loose(taskDir)
550+
} catch (err) {
551+
console.error(
552+
`[saveClineMessages] failed to get task directory size (${taskDir}): ${err instanceof Error ? err.message : String(err)}`,
553+
)
554+
}
555+
556+
await this.providerRef.deref()?.updateTaskHistory({
557+
id: this.taskId,
558+
number: this.taskNumber,
559+
ts: lastRelevantMessage.ts,
560+
task: taskMessage.text ?? "",
561+
tokensIn: apiMetrics.totalTokensIn,
562+
tokensOut: apiMetrics.totalTokensOut,
563+
cacheWrites: apiMetrics.totalCacheWrites,
564+
cacheReads: apiMetrics.totalCacheReads,
565+
totalCost: apiMetrics.totalCost,
566+
size: taskDirSize,
567+
})
568+
569+
this._saveClineMessagesTimeout = undefined
570+
}, 100) // Reduce buffering latency further to 100ms to speed up saving and reduce the risk of delays
401571
} catch (error) {
402572
console.error("Failed to save cline messages:", error)
403573
}

src/core/sliding-window/__tests__/sliding-window.test.ts

Lines changed: 80 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -67,10 +67,10 @@ describe("truncateConversation", () => {
6767
// 2 is already even, so no rounding needed
6868
const result = truncateConversation(messages, 0.5)
6969

70-
expect(result.length).toBe(3)
70+
expect(result.length).toBe(5)
7171
expect(result[0]).toEqual(messages[0])
72-
expect(result[1]).toEqual(messages[3])
73-
expect(result[2]).toEqual(messages[4])
72+
expect(result[1]).toEqual(messages[1])
73+
expect(result[2]).toEqual(messages[2])
7474
})
7575

7676
it("should round to an even number of messages to remove", () => {
@@ -88,8 +88,50 @@ describe("truncateConversation", () => {
8888
// 1.8 rounds down to 1, then to 0 to make it even
8989
const result = truncateConversation(messages, 0.3)
9090

91-
expect(result.length).toBe(7) // No messages removed
92-
expect(result).toEqual(messages)
91+
expect(result.length).toBe(6) // 1 message removed
92+
expect(result[0]).toEqual(messages[0])
93+
expect(result[1]).toEqual(messages[2])
94+
expect(result[2]).toEqual(messages[3])
95+
expect(result[3]).toEqual(messages[4])
96+
expect(result[4]).toEqual(messages[5])
97+
expect(result[5]).toEqual(messages[6])
98+
})
99+
100+
it("should truncate a longer conversation while preserving the leading messages", () => {
101+
const messages: Anthropic.Messages.MessageParam[] = [
102+
{ role: "user", content: "First message" },
103+
{ role: "assistant", content: "Second message" },
104+
{ role: "user", content: "Third message" },
105+
{ role: "assistant", content: "Fourth message" },
106+
{ role: "user", content: "Fifth message" },
107+
{ role: "assistant", content: "Sixth message" },
108+
{ role: "user", content: "Seventh message" },
109+
{ role: "user", content: "Eighth message" },
110+
{ role: "user", content: "Ninth message" },
111+
{ role: "user", content: "Tenth message" },
112+
{ role: "user", content: "Eleventh message" },
113+
{ role: "user", content: "Twelfth message" },
114+
{ role: "user", content: "Thirteenth message" },
115+
{ role: "user", content: "Fourteenth message" },
116+
{ role: "user", content: "Fifteenth message" },
117+
{ role: "user", content: "Sixteenth message" },
118+
{ role: "user", content: "Seventeenth message" },
119+
{ role: "user", content: "Eighteenth message" },
120+
{ role: "user", content: "Nineteenth message" },
121+
{ role: "user", content: "Twentieth message" },
122+
]
123+
124+
// 19 messages excluding the first; with fracToRemove = 0.3 the truncation
125+
// keeps the leading messages and removes 3 messages in total (20 -> 17)
126+
const result = truncateConversation(messages, 0.3)
127+
128+
expect(result.length).toBe(17) // 3 messages removed (20 -> 17)
129+
expect(result[0]).toEqual(messages[0])
130+
expect(result[1]).toEqual(messages[1])
131+
expect(result[2]).toEqual(messages[2])
132+
expect(result[4]).toEqual(messages[7])
133+
expect(result[5]).toEqual(messages[8])
134+
expect(result[result.length - 1]).toEqual(messages[19])
93135
})
94136

95137
it("should handle edge case with fracToRemove = 0", () => {
@@ -116,9 +158,11 @@ describe("truncateConversation", () => {
116158
// But 3 is odd, so it rounds down to 2 to make it even
117159
const result = truncateConversation(messages, 1)
118160

119-
expect(result.length).toBe(2)
161+
expect(result.length).toBe(4)
120162
expect(result[0]).toEqual(messages[0])
121-
expect(result[1]).toEqual(messages[3])
163+
expect(result[1]).toEqual(messages[1])
164+
expect(result[2]).toEqual(messages[2])
165+
expect(result[3]).toEqual(messages[3])
122166
})
123167
})
124168

@@ -215,6 +259,34 @@ describe("estimateTokenCount", () => {
215259
* Tests for the truncateConversationIfNeeded function
216260
*/
217261
describe("truncateConversationIfNeeded", () => {
262+
it("should truncate when message count exceeds MAX_HISTORY_MESSAGES", async () => {
263+
const MAX_HISTORY_MESSAGES = 100
264+
265+
const messages: Anthropic.Messages.MessageParam[] = []
266+
messages.push({ role: "user", content: "System message" }) // system message
267+
268+
for (let i = 1; i <= MAX_HISTORY_MESSAGES; i++) {
269+
messages.push({ role: i % 2 === 1 ? "user" : "assistant", content: `Message ${i}` })
270+
}
271+
272+
expect(messages.length).toBeGreaterThan(MAX_HISTORY_MESSAGES)
273+
274+
const result = await truncateConversationIfNeeded({
275+
messages,
276+
totalTokens: 1000,
277+
contextWindow: 100000,
278+
maxTokens: 30000,
279+
apiHandler: mockApiHandler,
280+
})
281+
282+
expect(result.length).toBeLessThan(messages.length)
283+
expect(result[0]).toEqual(messages[0])
284+
expect(result.length).toBeLessThan(MAX_HISTORY_MESSAGES)
285+
286+
const expectedMaxLength = Math.ceil(messages.length * 0.3) + 1 // +1 because the first message is always preserved
287+
expect(result.length).toBeLessThanOrEqual(expectedMaxLength)
288+
})
289+
218290
const createModelInfo = (contextWindow: number, maxTokens?: number): ModelInfo => ({
219291
contextWindow,
220292
supportsPromptCache: true,
@@ -391,7 +463,7 @@ describe("truncateConversationIfNeeded", () => {
391463
it("should truncate if tokens are within TOKEN_BUFFER_PERCENTAGE of the threshold", async () => {
392464
const modelInfo = createModelInfo(100000, 30000)
393465
const maxTokens = 100000 - 30000 // 70000
394-
const dynamicBuffer = modelInfo.contextWindow * TOKEN_BUFFER_PERCENTAGE // 10% of 100000 = 10000
466+
const dynamicBuffer = modelInfo.contextWindow * TOKEN_BUFFER_PERCENTAGE // 10% of context window = 10000
395467
const totalTokens = 70000 - dynamicBuffer + 1 // Just within the dynamic buffer of threshold (70000)
396468

397469
// Create messages with very small content in the last one to avoid token overflow

0 commit comments

Comments
 (0)