@@ -123,6 +123,16 @@ const DEFAULT_USAGE_COLLECTION_TIMEOUT_MS = 5000 // 5 seconds
 const FORCED_CONTEXT_REDUCTION_PERCENT = 75 // Keep 75% of context (remove 25%) on context window errors
 const MAX_CONTEXT_WINDOW_RETRIES = 3 // Maximum retries for context window errors
 
+interface RateLimitRetryPayload {
+	type: "rate_limit_retry"
+	status: "waiting" | "retrying" | "cancelled"
+	remainingSeconds?: number
+	attempt?: number
+	maxAttempts?: number
+	origin: "pre_request" | "retry_attempt"
+	detail?: string
+}
+
 export interface TaskOptions extends CreateTaskOptions {
 	provider: ClineProvider
 	apiConfiguration: ProviderSettings
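For orientation, the new `RateLimitRetryPayload` is the wire format the webview receives under `metadata.rateLimitRetry`. A sketch of one countdown tick as it might look in flight (values illustrative, not taken from the diff):

```ts
// Hypothetical payload: three seconds remain before the second of five retries.
const tick: RateLimitRetryPayload = {
	type: "rate_limit_retry",
	status: "waiting",
	remainingSeconds: 3,
	attempt: 2,
	maxAttempts: 5,
	origin: "retry_attempt",
	detail: "429 Too Many Requests",
}
```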
@@ -1100,8 +1110,12 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 		if (partial !== undefined) {
 			const lastMessage = this.clineMessages.at(-1)
 
+			const isRateLimitUpdate = type === "api_req_retry_delayed" && options.metadata?.rateLimitRetry !== undefined
 			const isUpdatingPreviousPartial =
-				lastMessage && lastMessage.partial && lastMessage.type === "say" && lastMessage.say === type
+				lastMessage &&
+				lastMessage.type === "say" &&
+				lastMessage.say === type &&
+				(lastMessage.partial || isRateLimitUpdate)
 
 			if (partial) {
 				if (isUpdatingPreviousPartial) {
@@ -1110,6 +1124,13 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 					lastMessage.images = images
 					lastMessage.partial = partial
 					lastMessage.progressStatus = progressStatus
+					if (options.metadata) {
+						const messageWithMetadata = lastMessage as ClineMessage & ClineMessageWithMetadata
+						if (!messageWithMetadata.metadata) {
+							messageWithMetadata.metadata = {}
+						}
+						Object.assign(messageWithMetadata.metadata, options.metadata)
+					}
 					this.updateClineMessage(lastMessage)
 				} else {
 					// This is a new partial message, so add it with partial state.
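Worth noting: `Object.assign` performs a shallow merge, so each update replaces the whole `rateLimitRetry` value rather than merging its nested fields. A minimal standalone illustration (plain objects, not the task code):

```ts
// Shallow merge: the nested object is swapped wholesale, so fields from the
// previous payload (here `attempt`) do not survive into the next one.
const metadata: Record<string, unknown> = {
	rateLimitRetry: { status: "waiting", remainingSeconds: 5, attempt: 1 },
}
Object.assign(metadata, { rateLimitRetry: { status: "retrying", remainingSeconds: 0 } })
console.log(metadata.rateLimitRetry) // { status: "retrying", remainingSeconds: 0 }
```

That is the right behavior for this payload, since every tick carries the full set of fields.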
@@ -1197,6 +1218,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 				images,
 				checkpoint,
 				contextCondense,
+				metadata: options.metadata,
 			})
 		}
 	}
@@ -2655,6 +2677,124 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 
 		let rateLimitDelay = 0
 
+		const sendRateLimitUpdate = async (payload: RateLimitRetryPayload, isPartial: boolean): Promise<void> => {
+			await this.say("api_req_retry_delayed", undefined, undefined, isPartial, undefined, undefined, {
+				metadata: { rateLimitRetry: payload },
+			})
+		}
+
+		const runRateLimitCountdown = async ({
+			seconds,
+			origin,
+			attempt,
+			maxAttempts,
+			detail,
+		}: {
+			seconds: number
+			origin: RateLimitRetryPayload["origin"]
+			attempt?: number
+			maxAttempts?: number
+			detail?: string
+		}): Promise<boolean> => {
+			const normalizedSeconds = Math.max(0, Math.ceil(seconds))
+
+			if (normalizedSeconds <= 0) {
+				if (this.abort) {
+					await sendRateLimitUpdate(
+						{
+							type: "rate_limit_retry",
+							status: "cancelled",
+							remainingSeconds: 0,
+							attempt,
+							maxAttempts,
+							origin,
+							detail,
+						},
+						false,
+					)
+					return false
+				}
+
+				await sendRateLimitUpdate(
+					{
+						type: "rate_limit_retry",
+						status: "retrying",
+						remainingSeconds: 0,
+						attempt,
+						maxAttempts,
+						origin,
+						detail,
+					},
+					false,
+				)
+				return true
+			}
+
+			for (let i = normalizedSeconds; i > 0; i--) {
+				if (this.abort) {
+					await sendRateLimitUpdate(
+						{
+							type: "rate_limit_retry",
+							status: "cancelled",
+							remainingSeconds: i,
+							attempt,
+							maxAttempts,
+							origin,
+							detail,
+						},
+						false,
+					)
+					return false
+				}
+
+				await sendRateLimitUpdate(
+					{
+						type: "rate_limit_retry",
+						status: "waiting",
+						remainingSeconds: i,
+						attempt,
+						maxAttempts,
+						origin,
+						detail,
+					},
+					true,
+				)
+
+				await delay(1000)
+			}
+
+			if (this.abort) {
+				await sendRateLimitUpdate(
+					{
+						type: "rate_limit_retry",
+						status: "cancelled",
+						remainingSeconds: 0,
+						attempt,
+						maxAttempts,
+						origin,
+						detail,
+					},
+					false,
+				)
+				return false
+			}
+
+			await sendRateLimitUpdate(
+				{
+					type: "rate_limit_retry",
+					status: "retrying",
+					remainingSeconds: 0,
+					attempt,
+					maxAttempts,
+					origin,
+					detail,
+				},
+				false,
+			)
+
+			return true
+		}
+
 		// Use the shared timestamp so that subtasks respect the same rate-limit
 		// window as their parent tasks.
 		if (Task.lastGlobalApiRequestTime) {
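`runRateLimitCountdown` resolves to `false` only when the task aborts mid-wait; otherwise it emits one partial "waiting" tick per second and closes with a non-partial "retrying" update. A standalone sketch of that sequence (hypothetical simulation, not code from the diff):

```ts
// Simulates the update sequence for an N-second wait with no abort:
// N partial "waiting" ticks, then one final non-partial "retrying" update.
type Tick = { status: "waiting" | "retrying"; remainingSeconds: number; partial: boolean }

function simulateCountdown(seconds: number): Tick[] {
	const ticks: Tick[] = []
	for (let i = Math.max(0, Math.ceil(seconds)); i > 0; i--) {
		ticks.push({ status: "waiting", remainingSeconds: i, partial: true })
	}
	ticks.push({ status: "retrying", remainingSeconds: 0, partial: false })
	return ticks
}

console.log(simulateCountdown(3)) // three "waiting" ticks, then "retrying"
```

The partial ticks update a single message in place (via the `isRateLimitUpdate` change above), so the UI shows one live countdown rather than a stream of separate messages.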
@@ -2666,11 +2806,16 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 
 		// Only show rate limiting message if we're not retrying. If retrying, we'll include the delay there.
 		if (rateLimitDelay > 0 && retryAttempt === 0) {
-			// Show countdown timer
-			for (let i = rateLimitDelay; i > 0; i--) {
-				const delayMessage = `Rate limiting for ${i} seconds...`
-				await this.say("api_req_retry_delayed", delayMessage, undefined, true)
-				await delay(1000)
+			const countdownCompleted = await runRateLimitCountdown({
+				seconds: rateLimitDelay,
+				origin: "pre_request",
+				attempt: 1,
+			})
+
+			if (!countdownCompleted) {
+				throw new Error(
+					`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during pre-request rate limit wait`,
+				)
 			}
 		}
 
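Since the structured payload replaces the old human-readable strings, a consumer has to render text from the fields itself. A hypothetical consumer-side helper (no such function exists in this diff) showing how the old wording maps onto the new statuses:

```ts
// Hypothetical renderer: reconstructs countdown text from a structured payload.
function describeRetry(p: RateLimitRetryPayload): string {
	const attempt = p.attempt !== undefined ? ` (attempt ${p.attempt}${p.maxAttempts ? `/${p.maxAttempts}` : ""})` : ""
	switch (p.status) {
		case "waiting":
			return `Rate limiting for ${p.remainingSeconds ?? 0} seconds...${attempt}`
		case "retrying":
			return `Retrying now...${attempt}`
		case "cancelled":
			return `Retry cancelled${attempt}`
	}
}
```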
@@ -2822,7 +2967,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 
 			// note that this api_req_failed ask is unique in that we only present this option if the api hasn't streamed any content yet (i.e. it fails on the first chunk), as it would allow them to hit a retry button. However if the api failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
 			if (autoApprovalEnabled && alwaysApproveResubmit) {
-				let errorMsg
+				let errorMsg: string
 
 				if (error.error?.metadata?.raw) {
 					errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
@@ -2843,7 +2988,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 					`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
 				)
 			}
-
+
 			// Delegate generator output from the recursive call with
 			// incremented retry count.
 			yield* this.attemptApiRequest(retryAttempt + 1)
@@ -2913,43 +3058,108 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 			const finalDelay = Math.max(exponentialDelay, rateLimitDelay)
 			if (finalDelay <= 0) return
 
-			// Build header text; fall back to error message if none provided
-			let headerText = header
-			if (!headerText) {
+			// Build detail text; fall back to error message if none provided
+			let errorMsg = header
+			if (!errorMsg) {
 				if (error?.error?.metadata?.raw) {
-					headerText = JSON.stringify(error.error.metadata.raw, null, 2)
+					errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
 				} else if (error?.message) {
-					headerText = error.message
+					errorMsg = error.message
 				} else {
-					headerText = "Unknown error"
+					errorMsg = "Unknown error"
+				}
+			}
+
+			// Sanitize detail for UI display
+			const sanitizedDetail = (() => {
+				if (!errorMsg) {
+					return undefined
 				}
+				const firstLine = errorMsg
+					.split("\n")
+					.map((line) => line.trim())
+					.find((line) => line.length > 0)
+				if (!firstLine) {
+					return undefined
+				}
+				return firstLine.length > 160 ? `${firstLine.slice(0, 157)}…` : firstLine
+			})()
+
+			// Helper to send rate limit updates with structured metadata
+			const sendRateLimitUpdate = async (
+				payload: RateLimitRetryPayload,
+				isPartial: boolean,
+			): Promise<void> => {
+				await this.say("api_req_retry_delayed", undefined, undefined, isPartial, undefined, undefined, {
+					metadata: { rateLimitRetry: payload },
+				})
 			}
-			headerText = headerText ? `${headerText}\n\n` : ""
 
-			// Show countdown timer with exponential backoff
+			// Show countdown timer with exponential backoff using structured metadata
 			for (let i = finalDelay; i > 0; i--) {
 				// Check abort flag during countdown to allow early exit
 				if (this.abort) {
+					await sendRateLimitUpdate(
+						{
+							type: "rate_limit_retry",
+							status: "cancelled",
+							remainingSeconds: i,
+							attempt: retryAttempt + 1,
+							origin: "retry_attempt",
+							detail: sanitizedDetail,
+						},
+						false,
+					)
 					throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
 				}
 
-				await this.say(
-					"api_req_retry_delayed",
-					`${headerText}Retry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
-					undefined,
+				await sendRateLimitUpdate(
+					{
+						type: "rate_limit_retry",
+						status: "waiting",
+						remainingSeconds: i,
+						attempt: retryAttempt + 1,
+						origin: "retry_attempt",
+						detail: sanitizedDetail,
+					},
 					true,
 				)
 				await delay(1000)
 			}
 
-			await this.say(
-				"api_req_retry_delayed",
-				`${headerText}Retry attempt ${retryAttempt + 1}\nRetrying now...`,
-				undefined,
+			// Final check before retrying
+			if (this.abort) {
+				await sendRateLimitUpdate(
+					{
+						type: "rate_limit_retry",
+						status: "cancelled",
+						remainingSeconds: 0,
+						attempt: retryAttempt + 1,
+						origin: "retry_attempt",
+						detail: sanitizedDetail,
+					},
+					false,
+				)
+				throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
+			}
+
+			await sendRateLimitUpdate(
+				{
+					type: "rate_limit_retry",
+					status: "retrying",
+					remainingSeconds: 0,
+					attempt: retryAttempt + 1,
+					origin: "retry_attempt",
+					detail: sanitizedDetail,
+				},
 				false,
 			)
 		} catch (err) {
 			console.error("Exponential backoff failed:", err)
+			// Re-throw if it's an abort error so it propagates correctly
+			if (err instanceof Error && err.message.includes("Aborted during retry countdown")) {
+				throw err
+			}
 		}
 	}
 
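For reference, the `sanitizedDetail` IIFE keeps only the first non-empty trimmed line of the error text and caps it at 160 characters (157 plus an ellipsis). Restated as a standalone function with a usage example (hypothetical name, same logic as above):

```ts
function sanitizeDetail(errorMsg: string | undefined): string | undefined {
	if (!errorMsg) return undefined
	// First non-empty line, trimmed; multi-line stack traces collapse to their head.
	const firstLine = errorMsg
		.split("\n")
		.map((line) => line.trim())
		.find((line) => line.length > 0)
	if (!firstLine) return undefined
	// Cap at 160 characters, truncating to 157 plus an ellipsis.
	return firstLine.length > 160 ? `${firstLine.slice(0, 157)}…` : firstLine
}

console.log(sanitizeDetail("  \n429 Too Many Requests: please retry\n  at fetch (...)"))
// -> "429 Too Many Requests: please retry"
```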