Skip to content

Commit ef363c7

Browse files
committed
feat: update dependencies and refactor stream encoding types for improved clarity
1 parent b73adfc commit ef363c7

5 files changed

Lines changed: 47 additions & 34 deletions

File tree

packages/services/service-ai/package.json

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,10 @@
1818
"test": "vitest run"
1919
},
2020
"dependencies": {
21+
"@ai-sdk/provider": "^2.0.0",
2122
"@objectstack/core": "workspace:*",
22-
"@objectstack/spec": "workspace:*"
23+
"@objectstack/spec": "workspace:*",
24+
"ai": "^6.0.0"
2325
},
2426
"devDependencies": {
2527
"@types/node": "^25.5.0",

packages/services/service-ai/src/__tests__/vercel-stream-encoder.test.ts

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -44,10 +44,10 @@ describe('encodeStreamPart', () => {
4444
});
4545
});
4646

47-
it('should encode tool-call-streaming-start as "b:" frame', () => {
47+
it('should encode tool-input-start as "b:" frame', () => {
4848
const part = {
49-
type: 'tool-call-streaming-start',
50-
toolCallId: 'call_2',
49+
type: 'tool-input-start',
50+
id: 'call_2',
5151
toolName: 'search',
5252
} as TextStreamPart<ToolSet>;
5353

@@ -61,11 +61,11 @@ describe('encodeStreamPart', () => {
6161
});
6262
});
6363

64-
it('should encode tool-call-delta as "c:" frame', () => {
64+
it('should encode tool-input-delta as "c:" frame', () => {
6565
const part = {
66-
type: 'tool-call-delta',
67-
toolCallId: 'call_2',
68-
argsTextDelta: '{"query":',
66+
type: 'tool-input-delta',
67+
id: 'call_2',
68+
delta: '{"query":',
6969
} as TextStreamPart<ToolSet>;
7070

7171
const frame = encodeStreamPart(part);
@@ -83,7 +83,7 @@ describe('encodeStreamPart', () => {
8383
type: 'tool-result',
8484
toolCallId: 'call_1',
8585
toolName: 'get_weather',
86-
result: { temperature: 72 },
86+
output: { temperature: 72 },
8787
} as TextStreamPart<ToolSet>;
8888

8989
const frame = encodeStreamPart(part);
@@ -112,20 +112,18 @@ describe('encodeStreamPart', () => {
112112
expect(payload.usage).toEqual({ promptTokens: 10, completionTokens: 20, totalTokens: 30 });
113113
});
114114

115-
it('should encode step-finish as "e:" frame', () => {
115+
it('should encode finish-step as "e:" frame', () => {
116116
const part = {
117-
type: 'step-finish',
117+
type: 'finish-step',
118118
finishReason: 'tool-calls',
119-
totalUsage: { promptTokens: 5, completionTokens: 10, totalTokens: 15 },
120-
isContinued: true,
119+
usage: { promptTokens: 5, completionTokens: 10, totalTokens: 15 },
121120
} as unknown as TextStreamPart<ToolSet>;
122121

123122
const frame = encodeStreamPart(part);
124123
expect(frame.startsWith('e:')).toBe(true);
125124

126125
const payload = JSON.parse(frame.slice(2));
127126
expect(payload.finishReason).toBe('tool-calls');
128-
expect(payload.isContinued).toBe(true);
129127
});
130128

131129
it('should return empty string for unknown event types', () => {
@@ -210,7 +208,7 @@ describe('encodeVercelDataStream', () => {
210208
type: 'tool-result',
211209
toolCallId: 'call_1',
212210
toolName: 'search',
213-
result: { hits: 42 },
211+
output: { hits: 42 },
214212
} as TextStreamPart<ToolSet>;
215213
yield {
216214
type: 'finish',

packages/services/service-ai/src/adapters/vercel-adapter.ts

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import type {
99
} from '@objectstack/spec/contracts';
1010
import type { LLMAdapter } from '@objectstack/spec/contracts';
1111
import type { AIToolDefinition } from '@objectstack/spec/contracts';
12-
import type { LanguageModelV3 } from '@ai-sdk/provider';
12+
import type { LanguageModelV2 } from '@ai-sdk/provider';
1313
import { generateText, streamText, tool as vercelTool, jsonSchema } from 'ai';
1414

1515
/**
@@ -63,7 +63,7 @@ function buildVercelOptions(options?: AIRequestOptions): Record<string, unknown>
6363
export class VercelLLMAdapter implements LLMAdapter {
6464
readonly name = 'vercel';
6565

66-
private readonly model: LanguageModelV3;
66+
private readonly model: LanguageModelV2;
6767

6868
constructor(config: VercelLLMAdapterConfig) {
6969
this.model = config.model;
@@ -81,9 +81,9 @@ export class VercelLLMAdapter implements LLMAdapter {
8181
model: result.response?.modelId,
8282
toolCalls: result.toolCalls?.length ? result.toolCalls : undefined,
8383
usage: result.usage ? {
84-
promptTokens: result.usage.promptTokens,
85-
completionTokens: result.usage.completionTokens,
86-
totalTokens: result.usage.totalTokens,
84+
promptTokens: result.usage.inputTokens ?? 0,
85+
completionTokens: result.usage.outputTokens ?? 0,
86+
totalTokens: result.usage.totalTokens ?? 0,
8787
} : undefined,
8888
};
8989
}
@@ -99,9 +99,9 @@ export class VercelLLMAdapter implements LLMAdapter {
9999
content: result.text,
100100
model: result.response?.modelId,
101101
usage: result.usage ? {
102-
promptTokens: result.usage.promptTokens,
103-
completionTokens: result.usage.completionTokens,
104-
totalTokens: result.usage.totalTokens,
102+
promptTokens: result.usage.inputTokens ?? 0,
103+
completionTokens: result.usage.outputTokens ?? 0,
104+
totalTokens: result.usage.totalTokens ?? 0,
105105
} : undefined,
106106
};
107107
}
@@ -121,7 +121,7 @@ export class VercelLLMAdapter implements LLMAdapter {
121121
}
122122
}
123123

124-
async embed(input: string | string[]): Promise<number[][]> {
124+
async embed(_input: string | string[]): Promise<number[][]> {
125125
// Vercel AI SDK uses a separate EmbeddingModel — not supported via this adapter.
126126
throw new Error(
127127
'[VercelLLMAdapter] Embeddings require a dedicated EmbeddingModel. ' +
@@ -144,5 +144,5 @@ export interface VercelLLMAdapterConfig {
144144
*
145145
* @example `openai('gpt-4o')` or `anthropic('claude-sonnet-4-20250514')`
146146
*/
147-
model: LanguageModelV3;
147+
model: LanguageModelV2;
148148
}

packages/services/service-ai/src/stream/vercel-stream-encoder.ts

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -46,22 +46,22 @@ export function encodeStreamPart(part: TextStreamPart<ToolSet>): string {
4646
args: part.input,
4747
})}\n`;
4848

49-
case 'tool-call-streaming-start':
49+
case 'tool-input-start':
5050
return `b:${JSON.stringify({
51-
toolCallId: part.toolCallId,
51+
toolCallId: part.id,
5252
toolName: part.toolName,
5353
})}\n`;
5454

55-
case 'tool-call-delta':
55+
case 'tool-input-delta':
5656
return `c:${JSON.stringify({
57-
toolCallId: part.toolCallId,
58-
argsTextDelta: part.argsTextDelta,
57+
toolCallId: part.id,
58+
argsTextDelta: part.delta,
5959
})}\n`;
6060

6161
case 'tool-result':
6262
return `a:${JSON.stringify({
6363
toolCallId: part.toolCallId,
64-
result: part.result,
64+
result: part.output,
6565
})}\n`;
6666

6767
// ── Finish / Step ────────────────────────────────────────
@@ -71,11 +71,10 @@ export function encodeStreamPart(part: TextStreamPart<ToolSet>): string {
7171
usage: part.totalUsage ?? undefined,
7272
})}\n`;
7373

74-
case 'step-finish':
74+
case 'finish-step':
7575
return `e:${JSON.stringify({
7676
finishReason: part.finishReason,
77-
usage: part.totalUsage ?? undefined,
78-
isContinued: part.isContinued ?? false,
77+
usage: part.usage ?? undefined,
7978
})}\n`;
8079

8180
// ── Unhandled types (silently skip) ──────────────────────

pnpm-lock.yaml

Lines changed: 14 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)