diff --git a/.changeset/shy-ravens-sink.md b/.changeset/shy-ravens-sink.md
new file mode 100644
index 00000000..701eef90
--- /dev/null
+++ b/.changeset/shy-ravens-sink.md
@@ -0,0 +1,10 @@
+---
+'@tanstack/ai-client': minor
+'@tanstack/ai-preact': minor
+'@tanstack/ai-svelte': minor
+'@tanstack/ai-react': minor
+'@tanstack/ai-solid': minor
+'@tanstack/ai-vue': minor
+---
+
+Added a `status` property to `useChat` to track the generation lifecycle (`ready`, `submitted`, `streaming`, `error`)
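The four states map directly to the request lifecycle: `ready` (idle), `submitted` (request sent, no chunks yet), `streaming` (chunks arriving), and `error`. A minimal sketch of observing them on the vanilla `ChatClient`, assuming a `connection` adapter configured elsewhere (not shown in this diff):

```ts
import { ChatClient, type ChatClientState } from '@tanstack/ai-client'

declare const connection: any // assumed: a configured ConnectionAdapter

const client = new ChatClient({
  connection,
  onStatusChange: (status: ChatClientState) => {
    // Fires on each transition:
    // ready -> submitted -> streaming -> ready (or -> error on failure)
    console.log('status:', status)
  },
})

await client.sendMessage('Hello!')
console.log(client.getStatus()) // 'ready' again once the stream finishes
```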
diff --git a/packages/typescript/ai-client/src/chat-client.ts b/packages/typescript/ai-client/src/chat-client.ts
index bb0a4693..e89761f3 100644
--- a/packages/typescript/ai-client/src/chat-client.ts
+++ b/packages/typescript/ai-client/src/chat-client.ts
@@ -1,18 +1,19 @@
+import type { AnyClientTool, ModelMessage, StreamChunk } from '@tanstack/ai'
 import {
   StreamProcessor,
   generateMessageId,
   normalizeToUIMessage,
 } from '@tanstack/ai'
+import type { ConnectionAdapter } from './connection-adapters'
+import type { ChatClientEventEmitter } from './events'
 import { DefaultChatClientEventEmitter } from './events'
 import type {
   ChatClientOptions,
+  ChatClientState,
   MessagePart,
   ToolCallPart,
   UIMessage,
 } from './types'
-import type { AnyClientTool, ModelMessage, StreamChunk } from '@tanstack/ai'
-import type { ConnectionAdapter } from './connection-adapters'
-import type { ChatClientEventEmitter } from './events'
 
 export class ChatClient {
   private processor: StreamProcessor
@@ -21,6 +22,7 @@ export class ChatClient {
   private body: Record<string, unknown> = {}
   private isLoading = false
   private error: Error | undefined = undefined
+  private status: ChatClientState = 'ready'
   private abortController: AbortController | null = null
   private events: ChatClientEventEmitter
   private clientToolsRef: { current: Map<string, AnyClientTool> }
@@ -37,6 +39,7 @@ export class ChatClient {
       onMessagesChange: (messages: Array<UIMessage>) => void
       onLoadingChange: (isLoading: boolean) => void
       onErrorChange: (error: Error | undefined) => void
+      onStatusChange: (status: ChatClientState) => void
     }
   }
@@ -63,6 +66,7 @@ export class ChatClient {
       onMessagesChange: options.onMessagesChange || (() => {}),
       onLoadingChange: options.onLoadingChange || (() => {}),
       onErrorChange: options.onErrorChange || (() => {}),
+      onStatusChange: options.onStatusChange || (() => {}),
     },
   }
@@ -75,13 +79,15 @@ export class ChatClient {
         this.callbacksRef.current.onMessagesChange(messages)
       },
       onStreamStart: () => {
-        // Stream started
+        this.setStatus('streaming')
       },
       onStreamEnd: (message: UIMessage) => {
         this.callbacksRef.current.onFinish(message)
+        this.setStatus('ready')
       },
       onError: (error: Error) => {
         this.setError(error)
+        this.setStatus('error')
         this.callbacksRef.current.onError(error)
       },
       onTextUpdate: (messageId: string, content: string) => {
@@ -187,6 +193,11 @@ export class ChatClient {
     this.events.loadingChanged(isLoading)
   }
 
+  private setStatus(status: ChatClientState): void {
+    this.status = status
+    this.callbacksRef.current.onStatusChange(status)
+  }
+
   private setError(error: Error | undefined): void {
     this.error = error
     this.callbacksRef.current.onErrorChange(error)
@@ -295,6 +306,7 @@ export class ChatClient {
    */
  private async streamResponse(): Promise<void> {
     this.setIsLoading(true)
+    this.setStatus('submitted')
     this.setError(undefined)
     this.abortController = new AbortController()
@@ -325,6 +337,7 @@ export class ChatClient {
         return
       }
       this.setError(err)
+      this.setStatus('error')
       this.callbacksRef.current.onError(err)
     }
   } finally {
@@ -368,6 +381,7 @@ export class ChatClient {
       this.abortController = null
     }
     this.setIsLoading(false)
+    this.setStatus('ready')
     this.events.stopped()
   }
@@ -502,6 +516,13 @@ export class ChatClient {
     return this.isLoading
   }
 
+  /**
+   * Get current status
+   */
+  getStatus(): ChatClientState {
+    return this.status
+  }
+
   /**
    * Get current error
    */
diff --git a/packages/typescript/ai-client/src/index.ts b/packages/typescript/ai-client/src/index.ts
index 5bc664c0..e1da2705 100644
--- a/packages/typescript/ai-client/src/index.ts
+++ b/packages/typescript/ai-client/src/index.ts
@@ -11,6 +11,7 @@ export type {
   ChatClientOptions,
   ChatRequestBody,
   InferChatMessages,
+  ChatClientState,
 } from './types'
 export { clientTools, createChatClientOptions } from './types'
 export type {
diff --git a/packages/typescript/ai-client/src/types.ts b/packages/typescript/ai-client/src/types.ts
index 4f83debb..8aff9e4d 100644
--- a/packages/typescript/ai-client/src/types.ts
+++ b/packages/typescript/ai-client/src/types.ts
@@ -26,6 +26,11 @@ export type ToolResultState =
   | 'complete' // Result is complete
   | 'error' // Error occurred
 
+/**
+ * ChatClient state - tracks the lifecycle of a chat generation
+ */
+export type ChatClientState = 'ready' | 'submitted' | 'streaming' | 'error'
+
 /**
  * Message parts - building blocks of UIMessage
  */
@@ -191,6 +196,11 @@ export interface ChatClientOptions<
    */
   onErrorChange?: (error: Error | undefined) => void
 
+  /**
+   * Callback when chat status changes
+   */
+  onStatusChange?: (status: ChatClientState) => void
+
   /**
    * Client-side tools with execution logic
    * When provided, tools with execute functions will be called automatically
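Since `ChatClientState` is a plain string union, consumers can build exhaustive mappings over it. A small illustrative sketch (the label text is invented here, not part of the package):

```ts
import type { ChatClientState } from '@tanstack/ai-client'

// Record<ChatClientState, string> forces a case for every state,
// so adding a fifth state later becomes a compile error at this site.
const statusLabel: Record<ChatClientState, string> = {
  ready: 'Idle',
  submitted: 'Waiting for the model…',
  streaming: 'Generating…',
  error: 'Something went wrong',
}
```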
diff --git a/packages/typescript/ai-client/tests/chat-client.test.ts b/packages/typescript/ai-client/tests/chat-client.test.ts
index 34476519..651d58b4 100644
--- a/packages/typescript/ai-client/tests/chat-client.test.ts
+++ b/packages/typescript/ai-client/tests/chat-client.test.ts
@@ -1,12 +1,12 @@
 import { describe, expect, it, vi } from 'vitest'
 import { ChatClient } from '../src/chat-client'
+import type { UIMessage } from '../src/types'
 import {
   createMockConnectionAdapter,
   createTextChunks,
   createThinkingChunks,
   createToolCallChunks,
 } from './test-utils'
-import type { UIMessage } from '../src/types'
 
 describe('ChatClient', () => {
   describe('constructor', () => {
@@ -280,6 +280,7 @@ describe('ChatClient', () => {
       await appendPromise
 
       expect(client.getIsLoading()).toBe(false)
+      expect(client.getStatus()).toBe('ready')
     })
   })
 
@@ -391,6 +392,36 @@ describe('ChatClient', () => {
     })
   })
 
+  describe('status', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const adapter = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 20,
+      })
+      const statuses: Array<string> = []
+      const client = new ChatClient({
+        connection: adapter,
+        onStatusChange: (s) => statuses.push(s),
+      })
+
+      const promise = client.sendMessage('Test')
+
+      // Should leave the ready state
+      expect(client.getStatus()).not.toBe('ready')
+
+      // Should be submitted or streaming
+      expect(['submitted', 'streaming']).toContain(client.getStatus())
+
+      await promise
+
+      expect(statuses).toContain('submitted')
+      expect(statuses).toContain('streaming')
+      expect(statuses[statuses.length - 1]).toBe('ready')
+    })
+  })
+
   describe('tool calls', () => {
     it('should handle tool calls from stream', async () => {
       const chunks = createToolCallChunks([
@@ -471,6 +502,7 @@ describe('ChatClient', () => {
       await client.sendMessage('Hello')
 
       expect(client.getError()).toBe(error)
+      expect(client.getStatus()).toBe('error')
     })
 
     it('should clear error on successful request', async () => {
@@ -486,12 +518,14 @@ describe('ChatClient', () => {
       await client.sendMessage('Fail')
 
       expect(client.getError()).toBeDefined()
+      expect(client.getStatus()).toBe('error')
 
       // Update connection via updateOptions
       client.updateOptions({ connection: successAdapter })
       await client.sendMessage('Success')
 
       expect(client.getError()).toBeUndefined()
+      expect(client.getStatus()).not.toBe('error')
     })
   })
diff --git a/packages/typescript/ai-preact/src/types.ts b/packages/typescript/ai-preact/src/types.ts
index 21333907..7679e08a 100644
--- a/packages/typescript/ai-preact/src/types.ts
+++ b/packages/typescript/ai-preact/src/types.ts
@@ -1,12 +1,13 @@
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type {
   ChatClientOptions,
+  ChatClientState,
   ChatRequestBody,
   UIMessage,
 } from '@tanstack/ai-client'
 
 // Re-export types from ai-client
-export type { UIMessage, ChatRequestBody }
+export type { ChatRequestBody, UIMessage }
 
 /**
  * Options for the useChat hook.
@@ -16,6 +17,7 @@ export type { UIMessage, ChatRequestBody }
  * - `onMessagesChange` - Managed by Preact state (exposed as `messages`)
  * - `onLoadingChange` - Managed by Preact state (exposed as `isLoading`)
  * - `onErrorChange` - Managed by Preact state (exposed as `error`)
+ * - `onStatusChange` - Managed by Preact state (exposed as `status`)
  *
  * All other callbacks (onResponse, onChunk, onFinish, onError) are
  * passed through to the underlying ChatClient and can be used for side effects.
@@ -26,7 +28,7 @@ export type { UIMessage, ChatRequestBody }
 export type UseChatOptions<TTools extends ReadonlyArray<AnyClientTool> = any> = Omit<
   ChatClientOptions<TTools>,
-  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
+  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
 >
 
 export interface UseChatReturn<
@@ -95,4 +97,9 @@ export interface UseChatReturn<
   /**
    * Clear all messages
    */
   clear: () => void
+
+  /**
+   * Current generation status
+   */
+  status: ChatClientState
 }
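A hedged sketch of what consuming `status` from the Preact hook might look like in a component (the `connection` prop is assumed to be configured elsewhere; component and prop names are illustrative):

```tsx
import { useChat } from '@tanstack/ai-preact'

export function ChatStatusBadge(props: { connection: any }) {
  // `status` is plain Preact state here, so the badge re-renders
  // on every lifecycle transition.
  const { status, error } = useChat({ connection: props.connection })
  if (status === 'error') return <span role="alert">{error?.message}</span>
  return <span>{status === 'streaming' ? 'Generating…' : status}</span>
}
```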
diff --git a/packages/typescript/ai-preact/src/use-chat.ts b/packages/typescript/ai-preact/src/use-chat.ts
index c3f0f208..28b6a6ed 100644
--- a/packages/typescript/ai-preact/src/use-chat.ts
+++ b/packages/typescript/ai-preact/src/use-chat.ts
@@ -1,3 +1,5 @@
+import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
+import { ChatClient, type ChatClientState } from '@tanstack/ai-client'
 import {
   useCallback,
   useEffect,
@@ -6,8 +8,6 @@ import {
   useRef,
   useState,
 } from 'preact/hooks'
-import { ChatClient } from '@tanstack/ai-client'
-import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 
 import type { UIMessage, UseChatOptions, UseChatReturn } from './types'
 
@@ -22,6 +22,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
   )
   const [isLoading, setIsLoading] = useState(false)
   const [error, setError] = useState<Error | undefined>(undefined)
+  const [status, setStatus] = useState<ChatClientState>('ready')
 
   // Track current messages in a ref to preserve them when client is recreated
   const messagesRef = useRef<Array<UIMessage<TTools>>>(
@@ -51,8 +52,12 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
       body: optionsRef.current.body,
       onResponse: optionsRef.current.onResponse,
       onChunk: optionsRef.current.onChunk,
-      onFinish: optionsRef.current.onFinish,
-      onError: optionsRef.current.onError,
+      onFinish: (message) => {
+        optionsRef.current.onFinish?.(message)
+      },
+      onError: (err) => {
+        optionsRef.current.onError?.(err)
+      },
       tools: optionsRef.current.tools,
       streamProcessor: options.streamProcessor,
       onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
         messagesRef.current = newMessages
       },
       onLoadingChange: (newIsLoading: boolean) => {
         setIsLoading(newIsLoading)
       },
+      onStatusChange: (newStatus: ChatClientState) => {
+        setStatus(newStatus)
+      },
       onErrorChange: (newError: Error | undefined) => {
         setError(newError)
       },
@@ -154,6 +162,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     stop,
     isLoading,
     error,
+    status,
     setMessages: setMessagesManually,
     clear,
     addToolResult,
diff --git a/packages/typescript/ai-preact/tests/use-chat.test.ts b/packages/typescript/ai-preact/tests/use-chat.test.ts
index b7bbedb6..aa21d1ca 100644
--- a/packages/typescript/ai-preact/tests/use-chat.test.ts
+++ b/packages/typescript/ai-preact/tests/use-chat.test.ts
@@ -1,13 +1,13 @@
-import { describe, expect, it, vi } from 'vitest'
+import type { ModelMessage } from '@tanstack/ai'
 import { act, waitFor } from '@testing-library/preact'
+import { describe, expect, it, vi } from 'vitest'
+import type { UIMessage } from '../src/types'
 import {
   createMockConnectionAdapter,
   createTextChunks,
   createToolCallChunks,
   renderUseChat,
 } from './test-utils'
-import type { UIMessage } from '../src/types'
-import type { ModelMessage } from '@tanstack/ai'
 
 describe('useChat', () => {
   describe('initialization', () => {
@@ -18,6 +18,7 @@ describe('useChat', () => {
       expect(result.current.messages).toEqual([])
       expect(result.current.isLoading).toBe(false)
       expect(result.current.error).toBeUndefined()
+      expect(result.current.status).toBe('ready')
     })
 
     it('should initialize with provided messages', () => {
@@ -506,6 +507,7 @@ describe('useChat', () => {
       await waitFor(
         () => {
           expect(result.current.isLoading).toBe(false)
+          expect(result.current.status).toBe('ready')
         },
         { timeout: 1000 },
       )
@@ -521,6 +523,7 @@ describe('useChat', () => {
       result.current.stop()
 
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('ready')
     })
 
     it('should clear loading state when stopped', async () => {
@@ -547,6 +550,7 @@ describe('useChat', () => {
       await waitFor(
         () => {
           expect(result.current.isLoading).toBe(false)
+          expect(result.current.status).toBe('ready')
         },
         { timeout: 1000 },
       )
@@ -559,6 +563,39 @@ describe('useChat', () => {
     })
   })
 
+  describe('status', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const adapter = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 50,
+      })
+      const { result } = renderUseChat({ connection: adapter })
+
+      let sendPromise: Promise<void>
+      act(() => {
+        sendPromise = result.current.sendMessage('Test')
+      })
+
+      // Should leave the ready state
+      await waitFor(() => {
+        expect(result.current.status).not.toBe('ready')
+      })
+
+      // Should be submitted or streaming
+      expect(['submitted', 'streaming']).toContain(result.current.status)
+
+      // Should return to ready eventually
+      await act(async () => {
+        await sendPromise!
+      })
+
+      await waitFor(() => {
+        expect(result.current.status).toBe('ready')
+      })
+    })
+  })
+
   describe('clear', () => {
     it('should clear all messages', async () => {
       const chunks = createTextChunks('Response')
@@ -1029,6 +1066,7 @@ describe('useChat', () => {
       expect(result.current.error?.message).toBe('Network request failed')
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('error')
     })
 
     it('should handle stream errors', async () => {
@@ -1048,6 +1086,7 @@ describe('useChat', () => {
       })
 
       expect(result.current.error?.message).toBe('Stream error')
+      expect(result.current.status).toBe('error')
     })
 
     it('should clear error on successful operation', async () => {
@@ -1065,6 +1104,7 @@ describe('useChat', () => {
       await waitFor(() => {
         expect(result.current.error).toBeDefined()
+        expect(result.current.status).toBe('error')
       })
 
       // Switch to working adapter
diff --git a/packages/typescript/ai-react/src/types.ts b/packages/typescript/ai-react/src/types.ts
index 80eaccea..0bca9883 100644
--- a/packages/typescript/ai-react/src/types.ts
+++ b/packages/typescript/ai-react/src/types.ts
@@ -1,12 +1,13 @@
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type {
   ChatClientOptions,
+  ChatClientState,
   ChatRequestBody,
   UIMessage,
 } from '@tanstack/ai-client'
 
 // Re-export types from ai-client
-export type { UIMessage, ChatRequestBody }
+export type { ChatRequestBody, UIMessage }
 
 /**
  * Options for the useChat hook.
@@ -16,6 +17,7 @@ export type { UIMessage, ChatRequestBody }
  * - `onMessagesChange` - Managed by React state (exposed as `messages`)
  * - `onLoadingChange` - Managed by React state (exposed as `isLoading`)
  * - `onErrorChange` - Managed by React state (exposed as `error`)
+ * - `onStatusChange` - Managed by React state (exposed as `status`)
  *
  * All other callbacks (onResponse, onChunk, onFinish, onError) are
  * passed through to the underlying ChatClient and can be used for side effects.
@@ -26,7 +28,7 @@ export type { UIMessage, ChatRequestBody }
 export type UseChatOptions<TTools extends ReadonlyArray<AnyClientTool> = any> = Omit<
   ChatClientOptions<TTools>,
-  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
+  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
 >
 
 export interface UseChatReturn<
@@ -86,6 +88,11 @@ export interface UseChatReturn<
    */
   error: Error | undefined
 
+  /**
+   * Current status of the chat client
+   */
+  status: ChatClientState
+
   /**
    * Set messages manually
    */
diff --git a/packages/typescript/ai-react/src/use-chat.ts b/packages/typescript/ai-react/src/use-chat.ts
index f9511e41..65f6d59a 100644
--- a/packages/typescript/ai-react/src/use-chat.ts
+++ b/packages/typescript/ai-react/src/use-chat.ts
@@ -1,6 +1,7 @@
-import { useCallback, useEffect, useId, useMemo, useRef, useState } from 'react'
-import { ChatClient } from '@tanstack/ai-client'
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
+import { ChatClient } from '@tanstack/ai-client'
+import type { ChatClientState } from '@tanstack/ai-client'
+import { useCallback, useEffect, useId, useMemo, useRef, useState } from 'react'
 
 import type { UIMessage, UseChatOptions, UseChatReturn } from './types'
 
@@ -15,6 +16,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
   )
   const [isLoading, setIsLoading] = useState(false)
   const [error, setError] = useState<Error | undefined>(undefined)
+  const [status, setStatus] = useState<ChatClientState>('ready')
 
   // Track current messages in a ref to preserve them when client is recreated
   const messagesRef = useRef<Array<UIMessage<TTools>>>(
@@ -50,8 +52,12 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
       body: optionsRef.current.body,
       onResponse: optionsRef.current.onResponse,
       onChunk: optionsRef.current.onChunk,
-      onFinish: optionsRef.current.onFinish,
-      onError: optionsRef.current.onError,
+      onFinish: (message: UIMessage) => {
+        optionsRef.current.onFinish?.(message)
+      },
+      onError: (error: Error) => {
+        optionsRef.current.onError?.(error)
+      },
       tools: optionsRef.current.tools,
       streamProcessor: options.streamProcessor,
       onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
         messagesRef.current = newMessages
       },
       onLoadingChange: (newIsLoading: boolean) => {
         setIsLoading(newIsLoading)
       },
@@ -63,6 +69,9 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
       onErrorChange: (newError: Error | undefined) => {
         setError(newError)
       },
+      onStatusChange: (status: ChatClientState) => {
+        setStatus(status)
+      },
     })
   }, [clientId])
@@ -154,6 +163,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     stop,
     isLoading,
     error,
+    status,
     setMessages: setMessagesManually,
     clear,
     addToolResult,
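In React, `status` gives a finer-grained signal than the boolean `isLoading`. A sketch of disabling a composer while a response is in flight (component and prop names are illustrative, and `connection` is assumed to be a configured adapter):

```tsx
import { useChat } from '@tanstack/ai-react'

export function Composer({ connection }: { connection: any }) {
  const { status, sendMessage } = useChat({ connection })
  // Both pre-stream ('submitted') and mid-stream ('streaming') count as busy.
  const busy = status === 'submitted' || status === 'streaming'
  return (
    <button disabled={busy} onClick={() => sendMessage('Hi!')}>
      {busy ? 'Working…' : 'Send'}
    </button>
  )
}
```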
diff --git a/packages/typescript/ai-react/tests/use-chat.test.ts b/packages/typescript/ai-react/tests/use-chat.test.ts
index c0a9e8d2..1eba78a7 100644
--- a/packages/typescript/ai-react/tests/use-chat.test.ts
+++ b/packages/typescript/ai-react/tests/use-chat.test.ts
@@ -1,14 +1,13 @@
-import { describe, it, expect, vi, beforeEach } from 'vitest'
+import type { ModelMessage } from '@tanstack/ai'
 import { waitFor } from '@testing-library/react'
-import { useChat } from '../src/use-chat'
+import { describe, expect, it, vi } from 'vitest'
+import type { UIMessage } from '../src/types'
 import {
-  renderUseChat,
   createMockConnectionAdapter,
   createTextChunks,
   createToolCallChunks,
+  renderUseChat,
 } from './test-utils'
-import type { UIMessage } from '../src/types'
-import type { ModelMessage } from '@tanstack/ai'
 
 describe('useChat', () => {
   describe('initialization', () => {
@@ -19,6 +18,7 @@ describe('useChat', () => {
       expect(result.current.messages).toEqual([])
       expect(result.current.isLoading).toBe(false)
       expect(result.current.error).toBeUndefined()
+      expect(result.current.status).toBe('ready')
     })
 
     it('should initialize with provided messages', () => {
@@ -473,6 +473,7 @@ describe('useChat', () => {
       await waitFor(
         () => {
           expect(result.current.isLoading).toBe(false)
+          expect(result.current.status).toBe('ready')
         },
         { timeout: 1000 },
       )
@@ -488,6 +489,7 @@ describe('useChat', () => {
       result.current.stop()
 
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('ready')
     })
 
     it('should clear loading state when stopped', async () => {
@@ -509,6 +511,7 @@ describe('useChat', () => {
       await waitFor(
         () => {
           expect(result.current.isLoading).toBe(false)
+          expect(result.current.status).toBe('ready')
         },
         { timeout: 1000 },
       )
@@ -519,6 +522,39 @@ describe('useChat', () => {
     })
   })
 
+  describe('status', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const adapter = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 50,
+      })
+      const { result } = renderUseChat({ connection: adapter })
+
+      const sendPromise = result.current.sendMessage('Test')
+
+      // Should leave the ready state
+      await waitFor(() => {
+        expect(result.current.status).not.toBe('ready')
+      })
+
+      // Should be submitted or streaming
+      expect(['submitted', 'streaming']).toContain(result.current.status)
+
+      // Should eventually reach streaming
+      await waitFor(() => {
+        expect(result.current.status).toBe('streaming')
+      })
+
+      await sendPromise
+
+      // Should return to ready
+      await waitFor(() => {
+        expect(result.current.status).toBe('ready')
+      })
+    })
+  })
+
   describe('clear', () => {
     it('should clear all messages', async () => {
       const chunks = createTextChunks('Response')
diff --git a/packages/typescript/ai-solid/src/types.ts b/packages/typescript/ai-solid/src/types.ts
index 45cbc9c7..050fb801 100644
--- a/packages/typescript/ai-solid/src/types.ts
+++ b/packages/typescript/ai-solid/src/types.ts
@@ -1,13 +1,14 @@
-import type { Accessor } from 'solid-js'
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type {
   ChatClientOptions,
+  ChatClientState,
   ChatRequestBody,
   UIMessage,
 } from '@tanstack/ai-client'
+import type { Accessor } from 'solid-js'
 
 // Re-export types from ai-client
-export type { UIMessage, ChatRequestBody }
+export type { ChatRequestBody, UIMessage }
 
 /**
  * Options for the useChat hook.
@@ -17,6 +18,7 @@ export type { UIMessage, ChatRequestBody }
  * - `onMessagesChange` - Managed by Solid signal (exposed as `messages`)
  * - `onLoadingChange` - Managed by Solid signal (exposed as `isLoading`)
  * - `onErrorChange` - Managed by Solid signal (exposed as `error`)
+ * - `onStatusChange` - Managed by Solid signal (exposed as `status`)
  *
  * All other callbacks (onResponse, onChunk, onFinish, onError) are
  * passed through to the underlying ChatClient and can be used for side effects.
@@ -27,7 +29,7 @@ export type { UIMessage, ChatRequestBody }
 export type UseChatOptions<TTools extends ReadonlyArray<AnyClientTool> = any> = Omit<
   ChatClientOptions<TTools>,
-  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
+  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
 >
 
 export interface UseChatReturn<
@@ -96,6 +98,11 @@ export interface UseChatReturn<
   /**
    * Clear all messages
    */
   clear: () => void
+
+  /**
+   * Current generation status
+   */
+  status: Accessor<ChatClientState>
 }
 
 // Note: createChatClientOptions and InferChatMessages are now in @tanstack/ai-client
diff --git a/packages/typescript/ai-solid/src/use-chat.ts b/packages/typescript/ai-solid/src/use-chat.ts
index 2a15fb37..10201fad 100644
--- a/packages/typescript/ai-solid/src/use-chat.ts
+++ b/packages/typescript/ai-solid/src/use-chat.ts
@@ -4,8 +4,9 @@ import {
   createSignal,
   createUniqueId,
 } from 'solid-js'
-import { ChatClient } from '@tanstack/ai-client'
+
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
+import { ChatClient, type ChatClientState } from '@tanstack/ai-client'
 import type { UIMessage, UseChatOptions, UseChatReturn } from './types'
 
 export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
@@ -19,6 +20,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
   )
   const [isLoading, setIsLoading] = createSignal(false)
   const [error, setError] = createSignal<Error | undefined>(undefined)
+  const [status, setStatus] = createSignal<ChatClientState>('ready')
 
   // Create ChatClient instance with callbacks to sync state
   // Note: Options are captured at client creation time.
@@ -32,8 +34,12 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
       body: options.body,
       onResponse: options.onResponse,
       onChunk: options.onChunk,
-      onFinish: options.onFinish,
-      onError: options.onError,
+      onFinish: (message) => {
+        options.onFinish?.(message)
+      },
+      onError: (err) => {
+        options.onError?.(err)
+      },
       tools: options.tools,
       streamProcessor: options.streamProcessor,
       onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
@@ -42,6 +48,9 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
       onLoadingChange: (newIsLoading: boolean) => {
         setIsLoading(newIsLoading)
       },
+      onStatusChange: (newStatus: ChatClientState) => {
+        setStatus(newStatus)
+      },
       onErrorChange: (newError: Error | undefined) => {
         setError(newError)
       },
@@ -125,6 +134,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     stop,
     isLoading,
     error,
+    status,
     setMessages: setMessagesManually,
     clear,
     addToolResult,
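Because the Solid hook returns signals, `status` is an `Accessor<ChatClientState>` and must be called to read. A sketch (names illustrative, `connection` assumed configured):

```tsx
import { useChat } from '@tanstack/ai-solid'

export function StatusIndicator(props: { connection: any }) {
  const { status } = useChat({ connection: props.connection })
  // Calling status() inside JSX keeps the indicator reactive.
  return <span>{status() === 'error' ? 'Failed' : status()}</span>
}
```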
diff --git a/packages/typescript/ai-solid/tests/test-utils.ts b/packages/typescript/ai-solid/tests/test-utils.ts
index 47680ffe..c6083516 100644
--- a/packages/typescript/ai-solid/tests/test-utils.ts
+++ b/packages/typescript/ai-solid/tests/test-utils.ts
@@ -7,7 +7,7 @@ export {
 } from '../../ai-client/tests/test-utils'
 
 import { renderHook } from '@solidjs/testing-library'
-import type { UseChatOptions, UseChatReturn } from '../src/types'
+import type { UseChatOptions } from '../src/types'
 import { useChat } from '../src/use-chat'
 
 /**
@@ -34,6 +34,7 @@ export function renderUseChat(options?: UseChatOptions) {
       messages: hook.messages(),
       isLoading: hook.isLoading(),
       error: hook.error(),
+      status: hook.status(),
       sendMessage: hook.sendMessage,
       append: hook.append,
       reload: hook.reload,
diff --git a/packages/typescript/ai-solid/tests/use-chat.test.ts b/packages/typescript/ai-solid/tests/use-chat.test.ts
index a392d96f..188a5ead 100644
--- a/packages/typescript/ai-solid/tests/use-chat.test.ts
+++ b/packages/typescript/ai-solid/tests/use-chat.test.ts
@@ -1,13 +1,13 @@
-import { describe, it, expect, vi } from 'vitest'
 import { waitFor } from '@solidjs/testing-library'
+import type { ModelMessage } from '@tanstack/ai'
+import { describe, expect, it, vi } from 'vitest'
+import type { UIMessage } from '../src/types'
 import {
-  renderUseChat,
   createMockConnectionAdapter,
   createTextChunks,
   createToolCallChunks,
+  renderUseChat,
 } from './test-utils'
-import type { UIMessage } from '../src/types'
-import type { ModelMessage } from '@tanstack/ai'
 
 describe('useChat', () => {
   describe('initialization', () => {
@@ -18,6 +18,7 @@ describe('useChat', () => {
      expect(result.current.messages).toEqual([])
      expect(result.current.isLoading).toBe(false)
      expect(result.current.error).toBeUndefined()
+     expect(result.current.status).toBe('ready')
    })
 
    it('should initialize with provided messages', () => {
@@ -472,6 +473,7 @@ describe('useChat', () => {
      await waitFor(
        () => {
          expect(result.current.isLoading).toBe(false)
+         expect(result.current.status).toBe('ready')
        },
        { timeout: 1000 },
      )
@@ -487,6 +489,7 @@ describe('useChat', () => {
      result.current.stop()
 
      expect(result.current.isLoading).toBe(false)
+     expect(result.current.status).toBe('ready')
    })
 
    it('should clear loading state when stopped', async () => {
@@ -508,6 +511,7 @@ describe('useChat', () => {
      await waitFor(
        () => {
          expect(result.current.isLoading).toBe(false)
+         expect(result.current.status).toBe('ready')
        },
        { timeout: 1000 },
      )
@@ -518,6 +522,40 @@ describe('useChat', () => {
    })
  })
 
+  describe('status', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const adapter = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 50,
+      })
+      const { result } = renderUseChat({ connection: adapter })
+
+      const sendPromise = result.current.sendMessage('Test')
+
+      // Should leave the ready state
+      await waitFor(() => {
+        expect(result.current.status).not.toBe('ready')
+      })
+
+      // Should be submitted or streaming
+      expect(['submitted', 'streaming']).toContain(result.current.status)
+
+      // Should eventually reach streaming
+      await waitFor(() => {
+        expect(result.current.status).toBe('streaming')
+      })
+
+      await sendPromise
+
+      // Should return to ready
+      await waitFor(() => {
+        expect(result.current.status).toBe('ready')
+      })
+    })
+  })
+
   describe('clear', () => {
     it('should clear all messages', async () => {
       const chunks = createTextChunks('Response')
@@ -924,6 +962,7 @@ describe('useChat', () => {
       expect(result.current.error?.message).toBe('Network request failed')
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('error')
     })
 
     it('should handle stream errors', async () => {
@@ -938,6 +977,7 @@ describe('useChat', () => {
       await waitFor(() => {
         expect(result.current.error).toBeDefined()
+        expect(result.current.status).toBe('error')
       })
 
       expect(result.current.error?.message).toBe('Stream error')
@@ -956,6 +996,7 @@ describe('useChat', () => {
       await waitFor(() => {
         expect(result.current.error).toBeDefined()
+        expect(result.current.status).toBe('error')
       })
 
       // Switch to working adapter
diff --git a/packages/typescript/ai-svelte/src/create-chat.svelte.ts b/packages/typescript/ai-svelte/src/create-chat.svelte.ts
index c4081d27..e501758d 100644
--- a/packages/typescript/ai-svelte/src/create-chat.svelte.ts
+++ b/packages/typescript/ai-svelte/src/create-chat.svelte.ts
@@ -1,5 +1,5 @@
-import { ChatClient } from '@tanstack/ai-client'
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
+import { ChatClient, type ChatClientState } from '@tanstack/ai-client'
 import type { CreateChatOptions, CreateChatReturn, UIMessage } from './types'
 
 /**
@@ -44,6 +44,7 @@ export function createChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
   let messages = $state<Array<UIMessage<TTools>>>(options.initialMessages || [])
   let isLoading = $state(false)
   let error = $state<Error | undefined>(undefined)
+  let status = $state<ChatClientState>('ready')
 
   // Create ChatClient instance
   const client = new ChatClient({
@@ -53,8 +54,12 @@ export function createChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     body: options.body,
     onResponse: options.onResponse,
     onChunk: options.onChunk,
-    onFinish: options.onFinish,
-    onError: options.onError,
+    onFinish: (message) => {
+      options.onFinish?.(message)
+    },
+    onError: (err) => {
+      options.onError?.(err)
+    },
     tools: options.tools,
     streamProcessor: options.streamProcessor,
     onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
@@ -63,6 +68,9 @@ export function createChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     onLoadingChange: (newIsLoading: boolean) => {
       isLoading = newIsLoading
     },
+    onStatusChange: (newStatus: ChatClientState) => {
+      status = newStatus
+    },
     onErrorChange: (newError: Error | undefined) => {
       error = newError
     },
@@ -127,6 +135,9 @@ export function createChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     get error() {
       return error
     },
+    get status() {
+      return status
+    },
     sendMessage,
     append,
     reload,
diff --git a/packages/typescript/ai-svelte/src/types.ts b/packages/typescript/ai-svelte/src/types.ts
index 5d07e34f..7c869ece 100644
--- a/packages/typescript/ai-svelte/src/types.ts
+++ b/packages/typescript/ai-svelte/src/types.ts
@@ -1,12 +1,13 @@
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type {
   ChatClientOptions,
+  ChatClientState,
   ChatRequestBody,
   UIMessage,
 } from '@tanstack/ai-client'
 
 // Re-export types from ai-client
-export type { UIMessage, ChatRequestBody }
+export type { ChatRequestBody, UIMessage }
 
 /**
  * Options for the createChat function.
@@ -16,6 +17,7 @@ export type { UIMessage, ChatRequestBody }
  * - `onMessagesChange` - Managed by Svelte state (exposed as `messages`)
  * - `onLoadingChange` - Managed by Svelte state (exposed as `isLoading`)
  * - `onErrorChange` - Managed by Svelte state (exposed as `error`)
+ * - `onStatusChange` - Managed by Svelte state (exposed as `status`)
  *
  * All other callbacks (onResponse, onChunk, onFinish, onError) are
  * passed through to the underlying ChatClient and can be used for side effects.
@@ -27,7 +29,7 @@ export type CreateChatOptions<
   TTools extends ReadonlyArray<AnyClientTool> = any,
 > = Omit<
   ChatClientOptions<TTools>,
-  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
+  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
 >
 
 export interface CreateChatReturn<
@@ -96,6 +98,11 @@ export interface CreateChatReturn<
   /**
    * Clear all messages
    */
   clear: () => void
+
+  /**
+   * Current generation status (reactive getter)
+   */
+  readonly status: ChatClientState
 }
 
 // Note: createChatClientOptions and InferChatMessages are now in @tanstack/ai-client
diff --git a/packages/typescript/ai-svelte/tests/use-chat.test.ts b/packages/typescript/ai-svelte/tests/use-chat.test.ts
index 8431e2de..292f5324 100644
--- a/packages/typescript/ai-svelte/tests/use-chat.test.ts
+++ b/packages/typescript/ai-svelte/tests/use-chat.test.ts
@@ -1,6 +1,6 @@
-import { describe, it, expect, beforeEach, vi } from 'vitest'
+import { beforeEach, describe, expect, it, vi } from 'vitest'
 import { createChat } from '../src/create-chat.svelte'
-import { createMockConnectionAdapter } from './test-utils'
+import { createMockConnectionAdapter, createTextChunks } from './test-utils'
 
 describe('createChat', () => {
   beforeEach(() => {
@@ -17,6 +17,7 @@ describe('createChat', () => {
     expect(chat.messages).toEqual([])
     expect(chat.isLoading).toBe(false)
     expect(chat.error).toBeUndefined()
+    expect(chat.status).toBe('ready')
   })
 
   it('should initialize with initial messages', () => {
@@ -146,4 +147,74 @@ describe('createChat', () => {
     expect(chat.error).toBeUndefined()
     expect(chat.error).toBeUndefined()
   })
+
+  it('should expose reactive status property', () => {
+    const mockConnection = createMockConnectionAdapter({ chunks: [] })
+
+    const chat = createChat({
+      connection: mockConnection,
+    })
+
+    // Access status multiple times
+    expect(chat.status).toBe('ready')
+    expect(chat.status).toBe('ready')
+  })
+
+  describe('status transitions', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const mockConnection = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 20,
+      })
+
+      const chat = createChat({
+        connection: mockConnection,
+      })
+
+      const promise = chat.sendMessage('Test')
+      expect(chat.status).not.toBe('ready')
+      expect(['submitted', 'streaming']).toContain(chat.status)
+
+      await promise
+      expect(chat.status).toBe('ready')
+    })
+
+    it('should transition to error on error', async () => {
+      const mockConnection = createMockConnectionAdapter({
+        shouldError: true,
+        error: new Error('AI Error'),
+      })
+
+      const chat = createChat({
+        connection: mockConnection,
+      })
+
+      await chat.sendMessage('Test')
+      expect(chat.status).toBe('error')
+    })
+
+    it('should transition to ready after stop', async () => {
+      const chunks = createTextChunks('Response')
+      const mockConnection = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 50,
+      })
+
+      const chat = createChat({
+        connection: mockConnection,
+      })
+
+      const promise = chat.sendMessage('Test')
+
+      // Wait a bit for it to start
+      await new Promise((resolve) => setTimeout(resolve, 10))
+      expect(chat.status).not.toBe('ready')
+
+      chat.stop()
+      expect(chat.status).toBe('ready')
+
+      await promise.catch(() => {})
+    })
+  })
 })
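In Svelte, `status` is exposed as a reactive getter on the object returned by `createChat`, so rune-based components can read it directly. A sketch of the script side (`connection` assumed configured; markup shown as a comment):

```ts
import { createChat } from '@tanstack/ai-svelte'

declare const connection: any // assumed: a configured ConnectionAdapter

const chat = createChat({ connection })

// `chat.status` is a getter backed by $state, so templates and
// $derived expressions re-evaluate on every transition, e.g.:
//   const busy = $derived(chat.status !== 'ready')
// In markup: {#if chat.status === 'streaming'}Generating…{/if}
void chat.status // 'ready' | 'submitted' | 'streaming' | 'error'
```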
diff --git a/packages/typescript/ai-vue/src/types.ts b/packages/typescript/ai-vue/src/types.ts
index d1b7c5cf..7c09f103 100644
--- a/packages/typescript/ai-vue/src/types.ts
+++ b/packages/typescript/ai-vue/src/types.ts
@@ -1,13 +1,14 @@
-import type { DeepReadonly, ShallowRef } from 'vue'
 import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type {
   ChatClientOptions,
+  ChatClientState,
   ChatRequestBody,
   UIMessage,
 } from '@tanstack/ai-client'
+import type { DeepReadonly, ShallowRef } from 'vue'
 
 // Re-export types from ai-client
-export type { UIMessage, ChatRequestBody }
+export type { ChatRequestBody, UIMessage }
 
 /**
  * Options for the useChat composable.
@@ -17,6 +18,7 @@ export type { UIMessage, ChatRequestBody }
  * - `onMessagesChange` - Managed by Vue ref (exposed as `messages`)
  * - `onLoadingChange` - Managed by Vue ref (exposed as `isLoading`)
  * - `onErrorChange` - Managed by Vue ref (exposed as `error`)
+ * - `onStatusChange` - Managed by Vue ref (exposed as `status`)
  *
  * All other callbacks (onResponse, onChunk, onFinish, onError) are
  * passed through to the underlying ChatClient and can be used for side effects.
@@ -27,7 +29,7 @@ export type { UIMessage, ChatRequestBody }
 export type UseChatOptions<TTools extends ReadonlyArray<AnyClientTool> = any> = Omit<
   ChatClientOptions<TTools>,
-  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange'
+  'onMessagesChange' | 'onLoadingChange' | 'onErrorChange' | 'onStatusChange'
 >
 
 export interface UseChatReturn<
@@ -96,6 +98,11 @@ export interface UseChatReturn<
   /**
    * Clear all messages
    */
   clear: () => void
+
+  /**
+   * Current generation status
+   */
+  status: DeepReadonly<ShallowRef<ChatClientState>>
 }
 
 // Note: createChatClientOptions and InferChatMessages are now in @tanstack/ai-client
diff --git a/packages/typescript/ai-vue/src/use-chat.ts b/packages/typescript/ai-vue/src/use-chat.ts
index f190d0ee..41bc637e 100644
--- a/packages/typescript/ai-vue/src/use-chat.ts
+++ b/packages/typescript/ai-vue/src/use-chat.ts
@@ -1,6 +1,7 @@
+import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
+import type { ChatClientState } from '@tanstack/ai-client'
 import { ChatClient } from '@tanstack/ai-client'
 import { onScopeDispose, readonly, shallowRef, useId } from 'vue'
-import type { AnyClientTool, ModelMessage } from '@tanstack/ai'
 import type { UIMessage, UseChatOptions, UseChatReturn } from './types'
 
 export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
@@ -14,6 +15,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
   )
   const isLoading = shallowRef(false)
   const error = shallowRef<Error | undefined>(undefined)
+  const status = shallowRef<ChatClientState>('ready')
 
   // Create ChatClient instance with callbacks to sync state
   const client = new ChatClient({
@@ -23,8 +25,12 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     body: options.body,
     onResponse: options.onResponse,
     onChunk: options.onChunk,
-    onFinish: options.onFinish,
-    onError: options.onError,
+    onFinish: (message) => {
+      options.onFinish?.(message)
+    },
+    onError: (err) => {
+      options.onError?.(err)
+    },
     tools: options.tools,
     streamProcessor: options.streamProcessor,
     onMessagesChange: (newMessages: Array<UIMessage<TTools>>) => {
@@ -33,6 +39,9 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     onLoadingChange: (newIsLoading: boolean) => {
       isLoading.value = newIsLoading
     },
+    onStatusChange: (newStatus: ChatClientState) => {
+      status.value = newStatus
+    },
     onErrorChange: (newError: Error | undefined) => {
       error.value = newError
     },
@@ -97,6 +106,7 @@ export function useChat<TTools extends ReadonlyArray<AnyClientTool> = any>(
     stop,
     isLoading: readonly(isLoading),
     error: readonly(error),
+    status: readonly(status),
     setMessages: setMessagesManually,
     clear,
     addToolResult,
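On the Vue side, `status` comes back wrapped in `readonly(shallowRef(...))`, so script code reads it via `.value` (templates unwrap it automatically). A sketch (`connection` assumed configured):

```ts
import { useChat } from '@tanstack/ai-vue'

declare const connection: any // assumed: a configured ConnectionAdapter

const { status, sendMessage } = useChat({ connection })

void sendMessage('Hello!')
// `.value` moves through 'submitted' and 'streaming' while the
// request is in flight, then settles back on 'ready' (or 'error').
console.log(status.value)
```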
diff --git a/packages/typescript/ai-vue/tests/test-utils.ts b/packages/typescript/ai-vue/tests/test-utils.ts
index d2d3ef26..c6994c51 100644
--- a/packages/typescript/ai-vue/tests/test-utils.ts
+++ b/packages/typescript/ai-vue/tests/test-utils.ts
@@ -1,8 +1,8 @@
-import { defineComponent } from 'vue'
+import type { UIMessage } from '@tanstack/ai-client'
 import { mount } from '@vue/test-utils'
-import { useChat } from '../src/use-chat'
+import { defineComponent } from 'vue'
 import type { UseChatOptions } from '../src/types'
-import type { UIMessage } from '@tanstack/ai-client'
+import { useChat } from '../src/use-chat'
 
 // Re-export test utilities from ai-client
 export {
@@ -42,6 +42,7 @@ export function renderUseChat(options?: UseChatOptions) {
     messages: hook.messages as Array<UIMessage>,
     isLoading: hook.isLoading,
     error: hook.error,
+    status: hook.status,
     sendMessage: hook.sendMessage,
     append: hook.append,
     reload: hook.reload,
diff --git a/packages/typescript/ai-vue/tests/use-chat.test.ts b/packages/typescript/ai-vue/tests/use-chat.test.ts
index 52a77555..c654f593 100644
--- a/packages/typescript/ai-vue/tests/use-chat.test.ts
+++ b/packages/typescript/ai-vue/tests/use-chat.test.ts
@@ -1,13 +1,13 @@
-import { describe, expect, it, vi } from 'vitest'
+import type { ModelMessage } from '@tanstack/ai'
 import { flushPromises } from '@vue/test-utils'
+import { describe, expect, it, vi } from 'vitest'
+import type { UIMessage } from '../src/types'
 import {
   createMockConnectionAdapter,
   createTextChunks,
   createToolCallChunks,
   renderUseChat,
 } from './test-utils'
-import type { UIMessage } from '../src/types'
-import type { ModelMessage } from '@tanstack/ai'
 
 describe('useChat', () => {
   describe('initialization', () => {
@@ -18,6 +18,7 @@ describe('useChat', () => {
       expect(result.current.messages).toEqual([])
       expect(result.current.isLoading).toBe(false)
       expect(result.current.error).toBeUndefined()
+      expect(result.current.status).toBe('ready')
     })
 
     it('should initialize with provided messages', () => {
@@ -421,6 +422,7 @@ describe('useChat', () => {
       // Should eventually stop loading
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('ready')
     })
 
     it('should be safe to call multiple times', () => {
@@ -433,6 +435,7 @@ describe('useChat', () => {
       result.current.stop()
 
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('ready')
     })
 
     it('should clear loading state when stopped', async () => {
@@ -456,6 +459,32 @@ describe('useChat', () => {
       await flushPromises()
 
       expect(result.current.isLoading).toBe(false)
+      expect(result.current.status).toBe('ready')
+    })
+  })
+
+  describe('status', () => {
+    it('should transition through states during generation', async () => {
+      const chunks = createTextChunks('Response')
+      const adapter = createMockConnectionAdapter({
+        chunks,
+        chunkDelay: 50,
+      })
+      const { result } = renderUseChat({ connection: adapter })
+
+      const sendPromise = result.current.sendMessage('Test')
+
+      // Should leave the ready state
+      await flushPromises()
+      expect(result.current.status).not.toBe('ready')
+
+      // Should be submitted or streaming
+      expect(['submitted', 'streaming']).toContain(result.current.status)
+
+      // Should return to ready eventually
+      await sendPromise
+      await flushPromises()
+      expect(result.current.status).toBe('ready')
     })
   })