Skip to content

Commit c5e6efa

Browse files
committed
fix(agent-runtime): Prevent fallbacks from triggering on user abort
- Check isAbortError in catch blocks to propagate aborts immediately
- Detect both custom ABORT_ERROR_MESSAGE and native AbortError
1 parent da6ab1d commit c5e6efa

File tree

2 files changed

+70
-39
lines changed

2 files changed

+70
-39
lines changed

packages/agent-runtime/src/llm-api/gemini-with-fallbacks.ts

Lines changed: 28 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { openaiModels, openrouterModels } from '@codebuff/common/old-constants'
2+
import { isAbortError, unwrapPromptResult } from '@codebuff/common/util/error'
23

34
import type {
45
FinetunedVertexModel,
@@ -31,7 +32,8 @@ import type { Message } from '@codebuff/common/types/messages/codebuff-message'
3132
* @param options.costMode - Optional cost mode ('free', 'normal', 'max') influencing fallback model choice.
3233
* @param options.useGPT4oInsteadOfClaude - Optional flag to use GPT-4o instead of Claude as the final fallback.
3334
* @returns A promise that resolves to the complete response string from the successful API call.
34-
* @throws If all API calls (primary and fallbacks) fail.
35+
* @throws {Error} If all API calls (primary and fallbacks) fail.
36+
* @throws {Error} When the request is aborted by user. Check with `isAbortError()`. Aborts are not retried.
3537
*/
3638
export async function promptFlashWithFallbacks(
3739
params: {
@@ -56,12 +58,18 @@ export async function promptFlashWithFallbacks(
5658
// Try finetuned model first if enabled
5759
if (useFinetunedModel) {
5860
try {
59-
return await promptAiSdk({
60-
...params,
61-
messages,
62-
model: useFinetunedModel,
63-
})
61+
return unwrapPromptResult(
62+
await promptAiSdk({
63+
...params,
64+
messages,
65+
model: useFinetunedModel,
66+
}),
67+
)
6468
} catch (error) {
69+
// Don't fall back on user-initiated aborts - propagate immediately
70+
if (isAbortError(error)) {
71+
throw error
72+
}
6573
logger.warn(
6674
{ error },
6775
'Error calling finetuned model, falling back to Gemini API',
@@ -71,18 +79,24 @@ export async function promptFlashWithFallbacks(
7179

7280
try {
7381
// First try Gemini
74-
return await promptAiSdk({ ...params, messages })
82+
return unwrapPromptResult(await promptAiSdk({ ...params, messages }))
7583
} catch (error) {
84+
// Don't fall back on user-initiated aborts - propagate immediately
85+
if (isAbortError(error)) {
86+
throw error
87+
}
7688
logger.warn(
7789
{ error },
7890
`Error calling Gemini API, falling back to ${useGPT4oInsteadOfClaude ? 'gpt-4o' : 'Claude'}`,
7991
)
80-
return await promptAiSdk({
81-
...params,
82-
messages,
83-
model: useGPT4oInsteadOfClaude
84-
? openaiModels.gpt4o
85-
: openrouterModels.openrouter_claude_3_5_haiku,
86-
})
92+
return unwrapPromptResult(
93+
await promptAiSdk({
94+
...params,
95+
messages,
96+
model: useGPT4oInsteadOfClaude
97+
? openaiModels.gpt4o
98+
: openrouterModels.openrouter_claude_3_5_haiku,
99+
}),
100+
)
87101
}
88102
}
Lines changed: 42 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,19 @@
11
import { models } from '@codebuff/common/old-constants'
22
import { buildArray } from '@codebuff/common/util/array'
3+
import { isAbortError, unwrapPromptResult } from '@codebuff/common/util/error'
34
import { parseMarkdownCodeBlock } from '@codebuff/common/util/file'
45
import { assistantMessage, userMessage } from '@codebuff/common/util/messages'
56

67
import type { PromptAiSdkFn } from '@codebuff/common/types/contracts/llm'
78
import type { Logger } from '@codebuff/common/types/contracts/logger'
89
import type { ParamsExcluding } from '@codebuff/common/types/function-params'
910

11+
/**
12+
* Applies code edits using Relace AI, with fallback to o3-mini on failure.
13+
*
14+
* @returns The updated code with edits applied.
15+
* @throws {Error} When the request is aborted by user. Check with `isAbortError()`. Aborts are not retried.
16+
*/
1017
export async function promptRelaceAI(
1118
params: {
1219
initialCode: string
@@ -21,24 +28,30 @@ export async function promptRelaceAI(
2128
try {
2229
const { tools: _tools, ...rest } = params
2330
// const model = 'relace-apply-2.5-lite'
24-
const content = await promptAiSdk({
25-
...rest,
26-
model: 'relace/relace-apply-3',
27-
messages: [
28-
userMessage(
29-
buildArray(
30-
instructions && `<instruction>${instructions}</instruction>`,
31-
`<code>${initialCode}</code>`,
32-
`<update>${editSnippet}</update>`,
33-
).join('\n'),
34-
),
35-
],
36-
system: undefined,
37-
includeCacheControl: false,
38-
})
39-
40-
return content + '\n'
31+
return (
32+
unwrapPromptResult(
33+
await promptAiSdk({
34+
...rest,
35+
model: 'relace/relace-apply-3',
36+
messages: [
37+
userMessage(
38+
buildArray(
39+
instructions && `<instruction>${instructions}</instruction>`,
40+
`<code>${initialCode}</code>`,
41+
`<update>${editSnippet}</update>`,
42+
).join('\n'),
43+
),
44+
],
45+
system: undefined,
46+
includeCacheControl: false,
47+
}),
48+
) + '\n'
49+
)
4150
} catch (error) {
51+
// Don't fall back on user-initiated aborts - propagate immediately
52+
if (isAbortError(error)) {
53+
throw error
54+
}
4255
logger.error(
4356
{
4457
error:
@@ -49,7 +62,7 @@ export async function promptRelaceAI(
4962
'Error calling Relace AI, falling back to o3-mini',
5063
)
5164

52-
// Fall back to Gemini
65+
// Fall back to o3-mini
5366
const prompt = `You are an expert programmer. Please rewrite this code file to implement the edit snippet while preserving as much of the original code and behavior as possible.
5467
5568
Initial code:
@@ -70,12 +83,16 @@ Important:
7083
7184
Please output just the complete updated file content with no other text.`
7285

73-
const content = await promptAiSdk({
74-
...params,
75-
messages: [userMessage(prompt), assistantMessage('```\n')],
76-
model: models.o3mini,
77-
})
78-
79-
return parseMarkdownCodeBlock(content) + '\n'
86+
return (
87+
parseMarkdownCodeBlock(
88+
unwrapPromptResult(
89+
await promptAiSdk({
90+
...params,
91+
messages: [userMessage(prompt), assistantMessage('```\n')],
92+
model: models.o3mini,
93+
}),
94+
),
95+
) + '\n'
96+
)
8097
}
8198
}

0 commit comments

Comments
 (0)