10 changes: 10 additions & 0 deletions .changeset/shy-ravens-sink.md
@@ -0,0 +1,10 @@
---
'@tanstack/ai-client': minor
'@tanstack/ai-preact': minor
'@tanstack/ai-svelte': minor
'@tanstack/ai-react': minor
'@tanstack/ai-solid': minor
'@tanstack/ai-vue': minor
---

Added a `status` property to `useChat` to track the generation lifecycle (`ready`, `submitted`, `streaming`, `error`)
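For illustration, a minimal sketch of how the new field might be consumed from the Preact hook changed later in this diff. The import path, component shape, and empty options object are assumptions for the sketch; only the `status`, `isLoading`, and `error` fields come from this PR.

```tsx
import { useChat } from '@tanstack/ai-preact'

export function ChatStatus() {
  // `status` follows ChatClientState: 'ready' | 'submitted' | 'streaming' | 'error'
  const { status, isLoading, error } = useChat({
    // connection/adapter options omitted — hypothetical setup
  })

  if (status === 'error') return <p>Request failed: {error?.message}</p>
  if (status === 'submitted') return <p>Waiting for the first chunk…</p>
  if (status === 'streaming') return <p>Streaming response…</p>
  return <p>{isLoading ? 'Working…' : 'Ready'}</p>
}
```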
29 changes: 25 additions & 4 deletions packages/typescript/ai-client/src/chat-client.ts
@@ -1,18 +1,19 @@
import type { AnyClientTool, ModelMessage, StreamChunk } from '@tanstack/ai'
import {
StreamProcessor,
generateMessageId,
normalizeToUIMessage,
} from '@tanstack/ai'
import type { ConnectionAdapter } from './connection-adapters'
import type { ChatClientEventEmitter } from './events'
import { DefaultChatClientEventEmitter } from './events'
import type {
ChatClientOptions,
ChatClientState,
MessagePart,
ToolCallPart,
UIMessage,
} from './types'
import type { AnyClientTool, ModelMessage, StreamChunk } from '@tanstack/ai'
import type { ConnectionAdapter } from './connection-adapters'
import type { ChatClientEventEmitter } from './events'

export class ChatClient {
private processor: StreamProcessor
@@ -21,6 +22,7 @@ export class ChatClient {
private body: Record<string, any> = {}
private isLoading = false
private error: Error | undefined = undefined
private status: ChatClientState = 'ready'
private abortController: AbortController | null = null
private events: ChatClientEventEmitter
private clientToolsRef: { current: Map<string, AnyClientTool> }
@@ -37,6 +39,7 @@ export class ChatClient {
onMessagesChange: (messages: Array<UIMessage>) => void
onLoadingChange: (isLoading: boolean) => void
onErrorChange: (error: Error | undefined) => void
onStatusChange: (status: ChatClientState) => void
}
}

@@ -63,6 +66,7 @@
onMessagesChange: options.onMessagesChange || (() => {}),
onLoadingChange: options.onLoadingChange || (() => {}),
onErrorChange: options.onErrorChange || (() => {}),
onStatusChange: options.onStatusChange || (() => {}),
},
}

@@ -75,13 +79,15 @@
this.callbacksRef.current.onMessagesChange(messages)
},
onStreamStart: () => {
// Stream started
this.setStatus('streaming')
},
onStreamEnd: (message: UIMessage) => {
this.callbacksRef.current.onFinish(message)
this.setStatus('ready')
},
onError: (error: Error) => {
this.setError(error)
this.setStatus('error')
this.callbacksRef.current.onError(error)
},
onTextUpdate: (messageId: string, content: string) => {
@@ -187,6 +193,11 @@
this.events.loadingChanged(isLoading)
}

private setStatus(status: ChatClientState): void {
this.status = status
this.callbacksRef.current.onStatusChange(status)
}

private setError(error: Error | undefined): void {
this.error = error
this.callbacksRef.current.onErrorChange(error)
@@ -295,6 +306,7 @@
*/
private async streamResponse(): Promise<void> {
this.setIsLoading(true)
this.setStatus('submitted')
this.setError(undefined)
this.abortController = new AbortController()

@@ -325,6 +337,7 @@
return
}
this.setError(err)
this.setStatus('error')
this.callbacksRef.current.onError(err)
}
} finally {
@@ -368,6 +381,7 @@
this.abortController = null
}
this.setIsLoading(false)
this.setStatus('ready')
this.events.stopped()
}

@@ -502,6 +516,13 @@
return this.isLoading
}

/**
* Get current status
*/
getStatus(): ChatClientState {
return this.status
}

/**
* Get current error
*/
1 change: 1 addition & 0 deletions packages/typescript/ai-client/src/index.ts
Expand Up @@ -11,6 +11,7 @@ export type {
ChatClientOptions,
ChatRequestBody,
InferChatMessages,
ChatClientState,
} from './types'
export { clientTools, createChatClientOptions } from './types'
export type {
10 changes: 10 additions & 0 deletions packages/typescript/ai-client/src/types.ts
@@ -26,6 +26,11 @@ export type ToolResultState =
| 'complete' // Result is complete
| 'error' // Error occurred

/**
* ChatClient state - tracks the generation lifecycle of a chat
*/
export type ChatClientState = 'ready' | 'submitted' | 'streaming' | 'error'

/**
* Message parts - building blocks of UIMessage
*/
@@ -191,6 +196,11 @@ export interface ChatClientOptions<
*/
onErrorChange?: (error: Error | undefined) => void

/**
* Callback when chat status changes
*/
onStatusChange?: (status: ChatClientState) => void

/**
* Client-side tools with execution logic
* When provided, tools with execute functions will be called automatically
36 changes: 35 additions & 1 deletion packages/typescript/ai-client/tests/chat-client.test.ts
@@ -1,12 +1,12 @@
import { describe, expect, it, vi } from 'vitest'
import { ChatClient } from '../src/chat-client'
import type { UIMessage } from '../src/types'
import {
createMockConnectionAdapter,
createTextChunks,
createThinkingChunks,
createToolCallChunks,
} from './test-utils'
import type { UIMessage } from '../src/types'

describe('ChatClient', () => {
describe('constructor', () => {
@@ -280,6 +280,7 @@ describe('ChatClient', () => {
await appendPromise

expect(client.getIsLoading()).toBe(false)
expect(client.getStatus()).toBe('ready')
})
})

@@ -391,6 +392,36 @@ describe('ChatClient', () => {
})
})

describe('status', () => {

it('should transition through states during generation', async () => {
const chunks = createTextChunks('Response')
const adapter = createMockConnectionAdapter({
chunks,
chunkDelay: 20,
})
const statuses: Array<string> = []
const client = new ChatClient({
connection: adapter,
onStatusChange: (s) => statuses.push(s),
})

const promise = client.sendMessage('Test')

// Should leave ready state
expect(client.getStatus()).not.toBe('ready')

// Should be submitted or streaming
expect(['submitted', 'streaming']).toContain(client.getStatus())

await promise

expect(statuses).toContain('submitted')
expect(statuses).toContain('streaming')
expect(statuses[statuses.length - 1]).toBe('ready')
})
})

describe('tool calls', () => {
it('should handle tool calls from stream', async () => {
const chunks = createToolCallChunks([
@@ -471,6 +502,7 @@
await client.sendMessage('Hello')

expect(client.getError()).toBe(error)
expect(client.getStatus()).toBe('error')
})

it('should clear error on successful request', async () => {
@@ -486,12 +518,14 @@

await client.sendMessage('Fail')
expect(client.getError()).toBeDefined()
expect(client.getStatus()).toBe('error')

// Update connection via updateOptions
client.updateOptions({ connection: successAdapter })

await client.sendMessage('Success')
expect(client.getError()).toBeUndefined()
expect(client.getStatus()).not.toBe('error')
})
})

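A framework-agnostic sketch of the same lifecycle at the `ChatClient` level, mirroring the status test above. The `traceStatus` helper and the `adapter` argument are hypothetical; `ChatClient`, `ChatClientState`, `onStatusChange`, `sendMessage`, and `getStatus` all appear in this diff.

```ts
import { ChatClient, type ChatClientState } from '@tanstack/ai-client'

// `adapter` stands in for any connection adapter; its construction is omitted here.
export async function traceStatus(adapter: any): Promise<Array<ChatClientState>> {
  const transitions: Array<ChatClientState> = []

  const client = new ChatClient({
    connection: adapter,
    onStatusChange: (status) => transitions.push(status),
  })

  await client.sendMessage('Hello')

  // A successful run records 'submitted' → 'streaming' → 'ready';
  // a failed run ends on 'error' (see the error-handling tests above).
  console.log(client.getStatus())
  return transitions
}
```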
11 changes: 9 additions & 2 deletions packages/typescript/ai-preact/src/types.ts
@@ -1,12 +1,13 @@
import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
import type {
ChatClientOptions,
ChatClientState,
ChatRequestBody,
UIMessage,
} from '@tanstack/ai-client'

// Re-export types from ai-client
export type { UIMessage, ChatRequestBody }
export type { ChatRequestBody, UIMessage }

/**
* Options for the useChat hook.
@@ -16,6 +17,7 @@ export type { UIMessage, ChatRequestBody }
* - `onMessagesChange` - Managed by Preact state (exposed as `messages`)
* - `onLoadingChange` - Managed by Preact state (exposed as `isLoading`)
* - `onErrorChange` - Managed by Preact state (exposed as `error`)
* - `onStatusChange` - Managed by Preact state (exposed as `status`)
*
* All other callbacks (onResponse, onChunk, onFinish, onError) are
* passed through to the underlying ChatClient and can be used for side effects.
@@ -26,7 +28,7 @@ export type { UIMessage, ChatRequestBody }
export type UseChatOptions<TTools extends ReadonlyArray<AnyClientTool> = any> =
Omit<
ChatClientOptions<TTools>,
'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
>

export interface UseChatReturn<
@@ -95,4 +97,9 @@ export interface UseChatReturn<
* Clear all messages
*/
clear: () => void

/**
* Current generation status
*/
status: ChatClientState
}
17 changes: 13 additions & 4 deletions packages/typescript/ai-preact/src/use-chat.ts
@@ -1,3 +1,5 @@
import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
import { ChatClient, type ChatClientState } from '@tanstack/ai-client'
import {
useCallback,
useEffect,
@@ -6,8 +8,6 @@ import {
useRef,
useState,
} from 'preact/hooks'
import { ChatClient } from '@tanstack/ai-client'
import type { AnyClientTool, ModelMessage } from '@tanstack/ai'

import type { UIMessage, UseChatOptions, UseChatReturn } from './types'

@@ -22,6 +22,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
)
const [isLoading, setIsLoading] = useState(false)
const [error, setError] = useState<Error | undefined>(undefined)
const [status, setStatus] = useState<ChatClientState>('ready')

// Track current messages in a ref to preserve them when client is recreated
const messagesRef = useRef<Array<UIMessage<TTools>>>(
@@ -51,8 +52,12 @@
body: optionsRef.current.body,
onResponse: optionsRef.current.onResponse,
onChunk: optionsRef.current.onChunk,
onFinish: optionsRef.current.onFinish,
onError: optionsRef.current.onError,
onFinish: (message) => {
optionsRef.current.onFinish?.(message)
},
onError: (err) => {
optionsRef.current.onError?.(err)
},
tools: optionsRef.current.tools,
streamProcessor: options.streamProcessor,
onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
@@ -61,6 +66,9 @@
onLoadingChange: (newIsLoading: boolean) => {
setIsLoading(newIsLoading)
},
onStatusChange: (newStatus: ChatClientState) => {
setStatus(newStatus)
},
onErrorChange: (newError: Error | undefined) => {
setError(newError)
},
@@ -154,6 +162,7 @@
stop,
isLoading,
error,
status,
setMessages: setMessagesManually,
clear,
addToolResult,