@@ -3243,12 +3243,15 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {

 			// Apply exponential backoff similar to first-chunk errors when auto-resubmit is enabled
 			const stateForBackoff = await this.providerRef.deref()?.getState()
-			if (stateForBackoff?.autoApprovalEnabled && stateForBackoff?.alwaysApproveResubmit) {
-				await this.backoffAndAnnounce(
-					currentItem.retryAttempt ?? 0,
-					error,
-					streamingFailedMessage,
-				)
+			const currentAttempt = currentItem.retryAttempt ?? 0
+			const maxRetries = stateForBackoff?.autoRetryMax ?? 0
+
+			if (
+				stateForBackoff?.autoApprovalEnabled &&
+				stateForBackoff?.alwaysApproveResubmit &&
+				(maxRetries === 0 || currentAttempt < maxRetries)
+			) {
+				await this.backoffAndAnnounce(currentAttempt, error, streamingFailedMessage)

 				// Check if task was aborted during the backoff
 				if (this.abort) {
@@ -3260,17 +3263,17 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 					await this.abortTask()
 					break
 				}
-			}

-			// Push the same content back onto the stack to retry, incrementing the retry attempt counter
-			stack.push({
-				userContent: currentUserContent,
-				includeFileDetails: false,
-				retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
-			})
+				// Push the same content back onto the stack to retry, incrementing the retry attempt counter
+				stack.push({
+					userContent: currentUserContent,
+					includeFileDetails: false,
+					retryAttempt: currentAttempt + 1,
+				})

-			// Continue to retry the request
-			continue
+				// Continue to retry the request
+				continue
+			}
 		}
 	}
 } finally {
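
The streaming-failure retry above (and the empty-response retry in the next hunks) gate the auto-resubmit on the same condition: auto-approval and resubmit must be enabled, and the attempt counter must still be under the configured cap, where a cap of 0 or an unset value means retry without limit. A minimal sketch of that predicate, assuming the state shape implied by the diff (the helper name is illustrative, not part of the change):

// Hypothetical helper summarising the gating condition used in the hunks above.
// autoRetryMax of 0 or undefined is treated as "no cap"; otherwise retries stop
// once the attempt counter reaches the configured maximum.
interface RetryGateState {
	autoApprovalEnabled?: boolean
	alwaysApproveResubmit?: boolean
	autoRetryMax?: number
}

function shouldAutoRetry(state: RetryGateState | undefined, attempt: number): boolean {
	const maxRetries = state?.autoRetryMax ?? 0
	return Boolean(
		state?.autoApprovalEnabled &&
			state?.alwaysApproveResubmit &&
			(maxRetries === 0 || attempt < maxRetries),
	)
}

// shouldAutoRetry({ autoApprovalEnabled: true, alwaysApproveResubmit: true, autoRetryMax: 3 }, 2) // true
// shouldAutoRetry({ autoApprovalEnabled: true, alwaysApproveResubmit: true, autoRetryMax: 3 }, 3) // false
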
@@ -3510,13 +3513,20 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {

 			// Check if we should auto-retry or prompt the user
 			// Reuse the state variable from above
-			if (state?.autoApprovalEnabled && state?.alwaysApproveResubmit) {
+			const currentAttemptEmpty = currentItem.retryAttempt ?? 0
+			const maxRetriesEmpty = state?.autoRetryMax ?? 0
+
+			if (
+				state?.autoApprovalEnabled &&
+				state?.alwaysApproveResubmit &&
+				(maxRetriesEmpty === 0 || currentAttemptEmpty < maxRetriesEmpty)
+			) {
 				// Auto-retry with backoff - don't persist failure message when retrying
 				const errorMsg =
 					"Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output."

 				await this.backoffAndAnnounce(
-					currentItem.retryAttempt ?? 0,
+					currentAttemptEmpty,
 					new Error("Empty assistant response"),
 					errorMsg,
 				)
@@ -3534,7 +3544,7 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 				stack.push({
 					userContent: currentUserContent,
 					includeFileDetails: false,
-					retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
+					retryAttempt: currentAttemptEmpty + 1,
 					userMessageWasRemoved: true,
 				})

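
In these two loop-level paths the retry is not a recursive call: the same user content is pushed back onto the processing stack with retryAttempt incremented, so the cap can eventually trip (the first-chunk failure path further down instead retries by recursing into attemptApiRequest). A stripped-down sketch of that requeue pattern with illustrative names; the real loop in Task does considerably more per item:

// Illustrative only: shows the requeue-with-counter shape, not the actual Task loop.
type WorkItem = {
	userContent: string
	includeFileDetails: boolean
	retryAttempt?: number
}

async function processWithRetries(
	initial: WorkItem,
	attempt: (item: WorkItem) => Promise<boolean>, // resolves false on a retryable failure
	maxRetries: number, // 0 = unlimited
): Promise<void> {
	const stack: WorkItem[] = [initial]
	while (stack.length > 0) {
		const currentItem = stack.pop()!
		const succeeded = await attempt(currentItem)
		const currentAttempt = currentItem.retryAttempt ?? 0
		if (!succeeded && (maxRetries === 0 || currentAttempt < maxRetries)) {
			// Push the same content back, bumping the counter that the retry gate checks.
			stack.push({ ...currentItem, retryAttempt: currentAttempt + 1 })
			continue
		}
	}
}
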
@@ -3888,7 +3898,6 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 			apiConfiguration,
 			autoApprovalEnabled,
 			alwaysApproveResubmit,
-			requestDelaySeconds,
 			mode,
 			autoCondenseContext = true,
 			autoCondenseContextPercent = 100,
@@ -4229,34 +4238,41 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 			// kilocode_change end
 			// note that this api_req_failed ask is unique in that we only present this option if the api hasn't streamed any content yet (ie it fails on the first chunk due), as it would allow them to hit a retry button. However if the api failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
 			if (autoApprovalEnabled && alwaysApproveResubmit) {
-				let errorMsg
-
-				if (error.error?.metadata?.raw) {
-					errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
-				} else if (error.message) {
-					errorMsg = error.message
+				const maxRetries = state?.autoRetryMax || 0
+				if (maxRetries > 0 && retryAttempt >= maxRetries) {
+					// Max retries reached, fall through to user prompt
 				} else {
-					errorMsg = "Unknown error"
-				}
+					let errorMsg

-				// Apply shared exponential backoff and countdown UX
-				await this.backoffAndAnnounce(retryAttempt, error, errorMsg)
+					if (error.error?.metadata?.raw) {
+						errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
+					} else if (error.message) {
+						errorMsg = error.message
+					} else {
+						errorMsg = "Unknown error"
+					}

-				// CRITICAL: Check if task was aborted during the backoff countdown
-				// This prevents infinite loops when users cancel during auto-retry
-				// Without this check, the recursive call below would continue even after abort
-				if (this.abort) {
-					throw new Error(
-						`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
-					)
-				}
+					// Apply shared exponential backoff and countdown UX
+					await this.backoffAndAnnounce(retryAttempt, error, errorMsg)

-				// Delegate generator output from the recursive call with
-				// incremented retry count.
-				yield* this.attemptApiRequest(retryAttempt + 1)
+					// CRITICAL: Check if task was aborted during the backoff countdown
+					// This prevents infinite loops when users cancel during auto-retry
+					// Without this check, the recursive call below would continue even after abort
+					if (this.abort) {
+						throw new Error(
+							`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
+						)
+					}

-				return
-			} else {
+					// Delegate generator output from the recursive call with
+					// incremented retry count.
+					yield* this.attemptApiRequest(retryAttempt + 1)
+
+					return
+				}
+			}
+
+			{
 				const { response } = await this.ask(
 					"api_req_failed",
 					error.message ?? JSON.stringify(serializeError(error), null, 2),
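
The new behaviour is driven by two settings read from provider state: autoRetryMax, the retry cap checked in the hunks above, and autoRetryStrategy, the delay strategy read in backoffAndAnnounce below, alongside the existing requestDelaySeconds, autoApprovalEnabled and alwaysApproveResubmit. The diff does not show their declarations; a plausible shape, stated as an assumption, with defaults matching how the code falls back when a field is unset:

// Assumed shape of the provider-state fields this change reads; not taken from the diff.
interface AutoRetrySettings {
	autoApprovalEnabled?: boolean
	alwaysApproveResubmit?: boolean
	requestDelaySeconds?: number // base delay in seconds; the code falls back to 5
	autoRetryMax?: number // 0 or undefined = retry indefinitely
	autoRetryStrategy?: "linear" | "exponential" // the code falls back to "exponential"
}
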
@@ -4304,11 +4320,14 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 		try {
 			const state = await this.providerRef.deref()?.getState()
 			const baseDelay = state?.requestDelaySeconds || 5
+			const strategy = state?.autoRetryStrategy || "exponential"

-			let exponentialDelay = Math.min(
-				Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
-				MAX_EXPONENTIAL_BACKOFF_SECONDS,
-			)
+			let retryDelay = baseDelay
+			if (strategy === "linear") {
+				retryDelay = baseDelay * (retryAttempt + 1)
+			} else if (strategy === "exponential") {
+				retryDelay = Math.min(Math.ceil(baseDelay * Math.pow(2, retryAttempt)), MAX_EXPONENTIAL_BACKOFF_SECONDS)
+			}

 			// Respect provider rate limit window
 			let rateLimitDelay = 0
@@ -4325,11 +4344,11 @@ export class Task extends EventEmitter<TaskEvents> implements TaskLike {
 				)
 				const match = retryInfo?.retryDelay?.match?.(/^(\d+)s$/)
 				if (match) {
-					exponentialDelay = Number(match[1]) + 1
+					retryDelay = Number(match[1]) + 1
 				}
 			}

-			const finalDelay = Math.max(exponentialDelay, rateLimitDelay)
+			const finalDelay = Math.max(retryDelay, rateLimitDelay)
 			if (finalDelay <= 0) return

 			// Build header text; fall back to error message if none provided
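
For reference, the delay the rewritten backoffAndAnnounce computes before the rate-limit window and the provider's own retryDelay hint can override it: linear grows by baseDelay per attempt, exponential doubles per attempt and is clamped to MAX_EXPONENTIAL_BACKOFF_SECONDS. The standalone function below is a sketch of that arithmetic under the defaults above; the cap value of 600 seconds is an assumed placeholder, the real constant is defined elsewhere in the codebase:

const MAX_EXPONENTIAL_BACKOFF_SECONDS = 600 // assumed value, for illustration only

function computeRetryDelay(
	strategy: "linear" | "exponential",
	baseDelay: number, // requestDelaySeconds, defaulting to 5
	retryAttempt: number, // 0-based
): number {
	if (strategy === "linear") {
		// baseDelay = 5 gives 5, 10, 15, 20, ... seconds
		return baseDelay * (retryAttempt + 1)
	}
	// baseDelay = 5 gives 5, 10, 20, 40, ... seconds, capped at the maximum
	return Math.min(Math.ceil(baseDelay * Math.pow(2, retryAttempt)), MAX_EXPONENTIAL_BACKOFF_SECONDS)
}

// computeRetryDelay("linear", 5, 3)      // 20
// computeRetryDelay("exponential", 5, 3) // 40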