Commit 2c95530: switch all instances of CoreMessage to CodebuffMessage

1 parent a509f5e

20 files changed, +83 −333 lines
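Every diff below follows the same two-step pattern: the type import swaps from the `ai` package's CoreMessage to the project-local CodebuffMessage, and helpers drop their `core` prefix (getCoreMessagesSubset → getMessagesSubset, coreMessagesWithSystem → messagesWithSystem). For orientation, a minimal sketch of what `@codebuff/common/types/message` might export, assuming CodebuffMessage mirrors the role/content shapes these diffs construct; this is a guess, not the actual contents of that module:

// Hypothetical sketch — not the real @codebuff/common/types/message.
// Grounded only in the shapes this commit builds: system/user/assistant
// roles with string content. The real type presumably also models tool
// calls and rich content parts, which this sketch omits.
export type CodebuffMessage =
  | { role: 'system'; content: string }
  | { role: 'user'; content: string }
  | { role: 'assistant'; content: string }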

backend/src/__tests__/request-files-prompt.test.ts

Lines changed: 4 additions & 2 deletions

@@ -14,8 +14,8 @@ import * as OriginalRequestFilesPromptModule from '../find-files/request-files-p
 import * as geminiWithFallbacksModule from '../llm-apis/gemini-with-fallbacks'
 
 import type { CostMode } from '@codebuff/common/constants'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 import type { ProjectFileContext } from '@codebuff/common/util/file'
-import type { CoreMessage } from 'ai'
 import type { Mock } from 'bun:test'
 
 // Restore module-level mocks using bunMockFn for the mock implementations
@@ -65,7 +65,9 @@ bunMockFn.module('@codebuff/bigquery', () => ({
 }))
 
 describe('requestRelevantFiles', () => {
-  const mockMessages: CoreMessage[] = [{ role: 'user', content: 'test prompt' }]
+  const mockMessages: CodebuffMessage[] = [
+    { role: 'user', content: 'test prompt' },
+  ]
   const mockSystem = 'test system'
   const mockFileContext: ProjectFileContext = {
     projectRoot: '/test/project',

backend/src/fast-rewrite.ts

Lines changed: 2 additions & 2 deletions

@@ -11,7 +11,7 @@ import { promptRelaceAI } from './llm-apis/relace-api'
 import { promptAiSdk } from './llm-apis/vercel-ai-sdk/ai-sdk'
 import { logger } from './util/logger'
 
-import type { CoreMessage } from 'ai'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 
 export async function fastRewrite(
   initialContent: string,
@@ -135,7 +135,7 @@ export const shouldAddFilePlaceholders = async (
   filePath: string,
   oldContent: string,
   rewrittenNewContent: string,
-  messageHistory: CoreMessage[],
+  messageHistory: CodebuffMessage[],
   fullResponse: string,
   userId: string | undefined,
   clientSessionId: string,

backend/src/find-files/check-new-files-necessary.ts

Lines changed: 4 additions & 4 deletions

@@ -2,17 +2,17 @@ import { models } from '@codebuff/common/constants'
 import { closeXml } from '@codebuff/common/util/xml'
 
 import { promptFlashWithFallbacks } from '../llm-apis/gemini-with-fallbacks'
-import { getCoreMessagesSubset } from '../util/messages'
+import { getMessagesSubset } from '../util/messages'
 
 import type { System } from '../llm-apis/claude'
-import type { CoreMessage } from 'ai'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 
 const systemIntro = `
 You are assisting the user with their software project, in the application Codebuff. Codebuff is a coding agent that helps developers write code or perform utility tasks.
 `.trim()
 
 export const checkNewFilesNecessary = async (
-  messages: CoreMessage[],
+  messages: CodebuffMessage[],
   system: System,
   clientSessionId: string,
   fingerprintId: string,
@@ -56,7 +56,7 @@ Answer with just 'YES' if reading new files is helpful, or 'NO' if the current f
   const response = await promptFlashWithFallbacks(
     [
       { role: 'system', content: systemWithCodebuffInfo },
-      ...getCoreMessagesSubset(
+      ...getMessagesSubset(
        [...messages, { role: 'user', content: prompt }],
        bufferTokens,
      ),
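The call site above hands `getMessagesSubset` the full history plus a `bufferTokens` allowance, which suggests it trims older messages so the request stays inside the model's context window. A hedged sketch of such a helper, assuming a `countTokensJson` counter like the one `get-file-reading-updates.ts` imports and an arbitrary 200k-token window; the real implementation in `../util/messages` is not shown in this commit:

// Hypothetical sketch, not the code from ../util/messages. Assumption:
// walk the history newest-to-oldest and keep whatever fits in a token
// budget, so the system prompt plus recent turns never overflow.
import { countTokensJson } from './token-counter'

import type { CodebuffMessage } from '@codebuff/common/types/message'

const CONTEXT_WINDOW = 200_000 // assumed budget, not from this commit

export function getMessagesSubset(
  messages: CodebuffMessage[],
  bufferTokens: number,
): CodebuffMessage[] {
  const budget = CONTEXT_WINDOW - bufferTokens
  const subset: CodebuffMessage[] = []
  let used = 0
  // Keep the most recent messages that fit under the budget.
  for (let i = messages.length - 1; i >= 0; i--) {
    const cost = countTokensJson(messages[i])
    if (used + cost > budget) break
    used += cost
    subset.unshift(messages[i])
  }
  return subset
}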

backend/src/find-files/request-files-prompt.ts

Lines changed: 14 additions & 14 deletions

@@ -19,8 +19,8 @@ import { promptAiSdk } from '../llm-apis/vercel-ai-sdk/ai-sdk'
 import { logger } from '../util/logger'
 import {
   castAssistantMessage,
-  coreMessagesWithSystem,
-  getCoreMessagesSubset,
+  messagesWithSystem,
+  getMessagesSubset,
 } from '../util/messages'
 import { getRequestContext } from '../websockets/request-context'
 
@@ -30,8 +30,8 @@ import type {
   GetExpandedFileContextForTrainingTrace,
   GetRelevantFilesTrace,
 } from '@codebuff/bigquery'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 import type { ProjectFileContext } from '@codebuff/common/util/file'
-import type { CoreMessage } from 'ai'
 
 const NUMBER_OF_EXAMPLE_FILES = 100
 const MAX_FILES_PER_REQUEST = 30
@@ -124,7 +124,7 @@ export async function requestRelevantFiles(
     messages,
     system,
   }: {
-    messages: CoreMessage[]
+    messages: CodebuffMessage[]
     system: string | Array<TextBlock>
   },
   fileContext: ProjectFileContext,
@@ -260,7 +260,7 @@ export async function requestRelevantFilesForTraining(
     messages,
     system,
   }: {
-    messages: CoreMessage[]
+    messages: CodebuffMessage[]
     system: string | Array<TextBlock>
   },
   fileContext: ProjectFileContext,
@@ -341,7 +341,7 @@ async function getRelevantFiles(
     messages,
     system,
   }: {
-    messages: CoreMessage[]
+    messages: CodebuffMessage[]
     system: string | Array<TextBlock>
   },
   userPrompt: string,
@@ -355,7 +355,7 @@ async function getRelevantFiles(
   modelId?: FinetunedVertexModel,
 ) {
   const bufferTokens = 100_000
-  const messagesWithPrompt = getCoreMessagesSubset(
+  const messagesWithPrompt = getMessagesSubset(
     [
       ...messages,
       {
@@ -366,12 +366,12 @@ async function getRelevantFiles(
     bufferTokens,
   )
   const start = performance.now()
-  let coreMessages = coreMessagesWithSystem(messagesWithPrompt, system)
+  let codebuffMessages = messagesWithSystem(messagesWithPrompt, system)
 
   // Converts assistant messages to user messages for finetuned model
-  coreMessages = coreMessages
+  codebuffMessages = codebuffMessages
     .map((msg, i) => {
-      if (msg.role === 'assistant' && i !== coreMessages.length - 1) {
+      if (msg.role === 'assistant' && i !== codebuffMessages.length - 1) {
         return castAssistantMessage(msg)
       } else {
        return msg
@@ -380,7 +380,7 @@ async function getRelevantFiles(
     .filter((msg) => msg !== null)
   const finetunedModel = modelId ?? finetunedVertexModels.ft_filepicker_010
 
-  let response = await promptFlashWithFallbacks(coreMessages, {
+  let response = await promptFlashWithFallbacks(codebuffMessages, {
     clientSessionId,
     userInputId,
     model: models.gemini2flash,
@@ -424,7 +424,7 @@ async function getRelevantFilesForTraining(
     messages,
     system,
   }: {
-    messages: CoreMessage[]
+    messages: CodebuffMessage[]
     system: string | Array<TextBlock>
   },
   userPrompt: string,
@@ -437,7 +437,7 @@ async function getRelevantFilesForTraining(
   repoId: string | undefined,
 ) {
   const bufferTokens = 100_000
-  const messagesWithPrompt = getCoreMessagesSubset(
+  const messagesWithPrompt = getMessagesSubset(
     [
       ...messages,
       {
@@ -449,7 +449,7 @@ async function getRelevantFilesForTraining(
   )
   const start = performance.now()
   let response = await promptAiSdk({
-    messages: coreMessagesWithSystem(messagesWithPrompt, system),
+    messages: messagesWithSystem(messagesWithPrompt, system),
     clientSessionId,
     fingerprintId,
     userInputId,
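The in-diff comment above says `castAssistantMessage` "converts assistant messages to user messages for finetuned model", and the call site filters out `null` results afterward. A minimal sketch consistent with those two facts; the actual helper in `../util/messages` is not part of this commit, and string content is an assumption:

// Hypothetical sketch of castAssistantMessage — call-site behavior only.
import type { CodebuffMessage } from '@codebuff/common/types/message'

export function castAssistantMessage(
  msg: CodebuffMessage,
): CodebuffMessage | null {
  // Non-assistant messages pass through untouched.
  if (msg.role !== 'assistant') return msg
  // Re-label assistant text as a user turn for the finetuned model.
  return typeof msg.content === 'string'
    ? { role: 'user', content: msg.content }
    : null // non-text assistant content dropped (assumption; callers filter null)
}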

backend/src/get-file-reading-updates.ts

Lines changed: 2 additions & 2 deletions

@@ -13,8 +13,8 @@ import {
 import { countTokensJson } from './util/token-counter'
 import { requestFiles } from './websockets/websocket-action'
 
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 import type { ProjectFileContext } from '@codebuff/common/util/file'
-import type { CoreMessage } from 'ai'
 import type { WebSocket } from 'ws'
 
 const getInitialFiles = (fileContext: ProjectFileContext) => {
@@ -39,7 +39,7 @@ const getInitialFiles = (fileContext: ProjectFileContext) => {
 
 export async function getFileReadingUpdates(
   ws: WebSocket,
-  messages: CoreMessage[],
+  messages: CodebuffMessage[],
   fileContext: ProjectFileContext,
   options: {
     requestedFiles?: string[]

backend/src/llm-apis/check-for-loop.ts

Lines changed: 0 additions & 65 deletions
This file was deleted.

backend/src/llm-apis/gemini-with-fallbacks.ts

Lines changed: 2 additions & 2 deletions

@@ -8,7 +8,7 @@ import type {
   FinetunedVertexModel,
   GeminiModel,
 } from '@codebuff/common/constants'
-import type { CoreMessage } from 'ai'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 
 /**
  * Prompts a Gemini model with fallback logic.
@@ -36,7 +36,7 @@ import type { CoreMessage } from 'ai'
  * @throws If all API calls (primary and fallbacks) fail.
  */
 export async function promptFlashWithFallbacks(
-  messages: CoreMessage[],
+  messages: CodebuffMessage[],
   options: {
     clientSessionId: string
     fingerprintId: string

backend/src/llm-apis/message-cost-tracker.ts

Lines changed: 3 additions & 5 deletions

@@ -21,10 +21,8 @@ import { stripNullCharsFromObject } from '../util/object'
 import { SWITCHBOARD } from '../websockets/server'
 import { sendAction } from '../websockets/websocket-action'
 
-import type { OpenAIMessage } from './openai-api'
 import type { ClientState } from '../websockets/switchboard'
-import type { Message } from '@codebuff/common/types/message'
-import type { CoreMessage } from 'ai'
+import type { CodebuffMessage } from '@codebuff/common/types/message'
 
 export const PROFIT_MARGIN = 0.3
 
@@ -332,7 +330,7 @@ type InsertMessageParams = {
   fingerprintId: string
   userInputId: string
   model: string
-  request: Message[] | OpenAIMessage[] | CoreMessage[]
+  request: CodebuffMessage[]
   response: string
   inputTokens: number
   outputTokens: number
@@ -532,7 +530,7 @@ export const saveMessage = async (value: {
   fingerprintId: string
   userInputId: string
   model: string
-  request: Message[] | OpenAIMessage[] | CoreMessage[]
+  request: CodebuffMessage[]
   response: string
   inputTokens: number
   outputTokens: number
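The notable change in this file is the narrowing: `request` previously accepted `Message[] | OpenAIMessage[] | CoreMessage[]`, and once every caller is normalized to the common type, the union collapses to `CodebuffMessage[]` and the `OpenAIMessage` import disappears. Restated outside diff notation as a hedged slice (only the fields visible in these hunks; the full `InsertMessageParams` presumably has more):

// Hypothetical partial view of InsertMessageParams after this commit —
// only the fields the diff shows, not the complete type.
import type { CodebuffMessage } from '@codebuff/common/types/message'

type InsertMessageParamsSlice = {
  fingerprintId: string
  userInputId: string
  model: string
  request: CodebuffMessage[] // was Message[] | OpenAIMessage[] | CoreMessage[]
  response: string
  inputTokens: number
  outputTokens: number
}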
