Skip to content

Commit 59738d8

Browse files
committed
refactor(evals,web): Update consumers to handle PromptResult aborts
- Handle abort in evals/buffbench commit screening
- Handle abort in scripts/get-changelog.ts changelog generation
- Use unwrapPromptResult in web admin relabel-for-user route
1 parent 4ffca42 commit 59738d8

File tree

3 files changed: +44 −21 lines changed

evals/buffbench/pick-commits.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -398,7 +398,7 @@ async function screenCommitsWithGpt5(
398398
const prompt = `${COMMIT_SCREENING_PROMPT}\n\nCommit to evaluate:\n\n${commitInfo}`
399399

400400
try {
401-
const response = await promptAiSdkStructured({
401+
const result = await promptAiSdkStructured({
402402
messages: [userMessage(prompt)],
403403
schema: CommitSelectionSchema,
404404
model: models.openrouter_gpt5,
@@ -414,6 +414,14 @@ async function screenCommitsWithGpt5(
414414
signal: new AbortController().signal,
415415
})
416416

417+
// Handle aborted request
418+
if (result.aborted) {
419+
console.log(`Commit ${commit.sha.substring(0, 8)} screening aborted`)
420+
return null
421+
}
422+
423+
const response = result.value
424+
417425
// Handle empty or invalid response
418426
if (
419427
!response ||

scripts/get-changelog.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,7 @@ Please create a changelog with:
164164
165165
Start your response with a heading using ### (three hashes) and organize the content below it.`
166166

167-
const response = await promptAiSdk({
167+
const result = await promptAiSdk({
168168
messages: [userMessage(prompt)],
169169
clientSessionId: generateCompactId(),
170170
fingerprintId: generateCompactId(),
@@ -180,6 +180,14 @@ Start your response with a heading using ### (three hashes) and organize the con
180180
signal: new AbortController().signal,
181181
})
182182

183+
// Handle aborted request
184+
if (result.aborted) {
185+
console.log(`⏹️ Changelog generation was aborted`)
186+
return false
187+
}
188+
189+
const response = result.value
190+
183191
// Clean up the AI response
184192
console.log(`🧹 Cleaning up AI response...`)
185193
const cleanedText = response

web/src/app/api/admin/relabel-for-user/route.ts

Lines changed: 26 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import {
1515
models,
1616
TEST_USER_ID,
1717
} from '@codebuff/common/old-constants'
18+
import { unwrapPromptResult } from '@codebuff/common/util/error'
1819
import { userMessage } from '@codebuff/common/util/messages'
1920
import { generateCompactId } from '@codebuff/common/util/string'
2021
import { closeXml } from '@codebuff/common/util/xml'
@@ -206,11 +207,13 @@ async function relabelTraceWithModel(params: {
206207
system: payload.system as System,
207208
})
208209

209-
const output = await promptAiSdk({
210-
...promptContext,
211-
model,
212-
messages,
213-
})
210+
const output = unwrapPromptResult(
211+
await promptAiSdk({
212+
...promptContext,
213+
model,
214+
messages,
215+
}),
216+
)
214217

215218
const relabel: Relabel = {
216219
id: generateCompactId(),
@@ -350,12 +353,14 @@ async function relabelWithRelace(params: {
350353
filesWithPath.map((file) => `- ${file.path}`).join('\n'),
351354
].join('\n\n')
352355

353-
const ranked = await promptAiSdk({
354-
...promptContext,
355-
model: models.openrouter_claude_sonnet_4,
356-
messages: [userMessage(prompt)],
357-
includeCacheControl: false,
358-
})
356+
const ranked = unwrapPromptResult(
357+
await promptAiSdk({
358+
...promptContext,
359+
model: models.openrouter_claude_sonnet_4,
360+
messages: [userMessage(prompt)],
361+
includeCacheControl: false,
362+
}),
363+
)
359364

360365
const rankedFiles =
361366
ranked
@@ -432,15 +437,17 @@ async function relabelWithClaudeWithFullFileContext(params: {
432437
system = systemCopy
433438
}
434439

435-
const output = await promptAiSdk({
436-
...promptContext,
437-
model,
438-
messages: messagesWithSystem({
439-
messages: (tracePayload.messages || []) as Message[],
440-
system,
440+
const output = unwrapPromptResult(
441+
await promptAiSdk({
442+
...promptContext,
443+
model,
444+
messages: messagesWithSystem({
445+
messages: (tracePayload.messages || []) as Message[],
446+
system,
447+
}),
448+
maxOutputTokens: 1000,
441449
}),
442-
maxOutputTokens: 1000,
443-
})
450+
)
444451

445452
const relabel: Relabel = {
446453
id: generateCompactId(),

0 commit comments

Comments (0)