Skip to content

Commit c74d63d

Browse files
committed
fix: prevent duplicate retry messages when API returns retry info
- Check if error message already contains retry-related information - Avoid adding duplicate "Retry attempt X" and "Retrying in Y seconds" messages - Add test coverage for the scenario Fixes #6541
1 parent 836371c commit c74d63d

File tree

2 files changed

+131
-12
lines changed

2 files changed

+131
-12
lines changed

src/core/task/Task.ts

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2010,6 +2010,10 @@ export class Task extends EventEmitter<TaskEvents> {
20102010
errorMsg = "Unknown error"
20112011
}
20122012

2013+
// Check if the error message already contains retry-related information
2014+
// This prevents duplicate retry messages when the API itself returns retry information
2015+
const containsRetryInfo = /retry|retrying|attempt/i.test(errorMsg)
2016+
20132017
const baseDelay = requestDelaySeconds || 5
20142018
let exponentialDelay = Math.min(
20152019
Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
@@ -2034,21 +2038,21 @@ export class Task extends EventEmitter<TaskEvents> {
20342038

20352039
// Show countdown timer with exponential backoff
20362040
for (let i = finalDelay; i > 0; i--) {
2037-
await this.say(
2038-
"api_req_retry_delayed",
2039-
`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
2040-
undefined,
2041-
true,
2042-
)
2041+
// If the error already contains retry info, don't add our own retry message
2042+
const retryMessage = containsRetryInfo
2043+
? `${errorMsg}\n\nRetrying in ${i} seconds...`
2044+
: `${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`
2045+
2046+
await this.say("api_req_retry_delayed", retryMessage, undefined, true)
20432047
await delay(1000)
20442048
}
20452049

2046-
await this.say(
2047-
"api_req_retry_delayed",
2048-
`${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying now...`,
2049-
undefined,
2050-
false,
2051-
)
2050+
// Final retry message
2051+
const finalRetryMessage = containsRetryInfo
2052+
? `${errorMsg}\n\nRetrying now...`
2053+
: `${errorMsg}\n\nRetry attempt ${retryAttempt + 1}\nRetrying now...`
2054+
2055+
await this.say("api_req_retry_delayed", finalRetryMessage, undefined, false)
20522056

20532057
// Delegate generator output from the recursive call with
20542058
// incremented retry count.

src/core/task/__tests__/Task.spec.ts

Lines changed: 115 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -872,6 +872,121 @@ describe("Cline", () => {
872872
await task.catch(() => {})
873873
})
874874

875+
it("should not duplicate retry messages when error already contains retry info", async () => {
876+
// Mock delay before creating the task
877+
const mockDelay = vi.fn().mockResolvedValue(undefined)
878+
vi.mocked(delay).mockImplementation(mockDelay)
879+
880+
const [cline, task] = Task.create({
881+
provider: mockProvider,
882+
apiConfiguration: mockApiConfig,
883+
task: "test task",
884+
})
885+
886+
// Mock say to track messages
887+
const saySpy = vi.spyOn(cline, "say").mockResolvedValue(undefined)
888+
889+
// Create an error that already contains retry information
890+
const mockError = new Error("Engine loop is not running. Retry attempt 1\nRetrying now...")
891+
892+
// Mock createMessage to fail first then succeed
893+
let callCount = 0
894+
vi.spyOn(cline.api, "createMessage").mockImplementation(() => {
895+
callCount++
896+
if (callCount === 1) {
897+
// First call fails - create a proper async iterator that throws
898+
const failedIterator = {
899+
[Symbol.asyncIterator]: () => ({
900+
next: async () => {
901+
throw mockError
902+
},
903+
}),
904+
}
905+
return failedIterator as any
906+
} else {
907+
// Subsequent calls succeed
908+
return {
909+
async *[Symbol.asyncIterator]() {
910+
yield { type: "text", text: "Success" } as ApiStreamChunk
911+
},
912+
} as any
913+
}
914+
})
915+
916+
// Set alwaysApproveResubmit and requestDelaySeconds
917+
mockProvider.getState = vi.fn().mockResolvedValue({
918+
alwaysApproveResubmit: true,
919+
autoApprovalEnabled: true,
920+
requestDelaySeconds: 3,
921+
})
922+
923+
// Mock previous API request message
924+
cline.clineMessages = [
925+
{
926+
ts: Date.now(),
927+
type: "say",
928+
say: "api_req_started",
929+
text: JSON.stringify({
930+
tokensIn: 100,
931+
tokensOut: 50,
932+
cacheWrites: 0,
933+
cacheReads: 0,
934+
request: "test request",
935+
}),
936+
},
937+
]
938+
939+
// Abandon the task to prevent hanging
940+
cline.abandoned = true
941+
942+
try {
943+
// Trigger API request - this will throw due to abandoned state
944+
const iterator = cline.attemptApiRequest(0)
945+
946+
// Try to get the first value, which should trigger the error and retry logic
947+
try {
948+
await iterator.next()
949+
} catch (e) {
950+
// Expected to throw due to abandoned state
951+
}
952+
953+
// Wait a bit for async operations
954+
await new Promise((resolve) => setTimeout(resolve, 100))
955+
956+
// Calculate expected delay for first retry
957+
const baseDelay = 3 // from requestDelaySeconds
958+
959+
// Verify countdown messages don't include duplicate retry attempt info
960+
for (let i = baseDelay; i > 0; i--) {
961+
const calls = saySpy.mock.calls.filter(
962+
(call) =>
963+
call[0] === "api_req_retry_delayed" && call[1]?.includes(`Retrying in ${i} seconds`),
964+
)
965+
if (calls.length > 0) {
966+
const message = calls[0][1] as string
967+
// The error message contains "Retry attempt 1", but our code should not add another "Retry attempt X"
968+
// So we should only see one occurrence (from the original error)
969+
const retryAttemptMatches = (message.match(/Retry attempt/g) || []).length
970+
expect(retryAttemptMatches).toBe(1) // Only the one from the error message
971+
}
972+
}
973+
974+
// Verify final retry message
975+
const finalCalls = saySpy.mock.calls.filter(
976+
(call) => call[0] === "api_req_retry_delayed" && call[1]?.includes("Retrying now"),
977+
)
978+
if (finalCalls.length > 0) {
979+
const finalMessage = finalCalls[0][1] as string
980+
// Since the error already contains "Retrying now", our code should not add another one
981+
// So we should only see 1 occurrence total
982+
const retryingNowMatches = (finalMessage.match(/Retrying now/g) || []).length
983+
expect(retryingNowMatches).toBe(1) // Only one occurrence, not duplicated
984+
}
985+
} finally {
986+
await task.catch(() => {})
987+
}
988+
})
989+
875990
describe("processUserContentMentions", () => {
876991
it("should process mentions in task and feedback tags", async () => {
877992
const [cline, task] = Task.create({

0 commit comments

Comments
 (0)