diff --git a/.changeset/fair-mails-pretend.md b/.changeset/fair-mails-pretend.md
new file mode 100644
index 000000000000..e8d99c419348
--- /dev/null
+++ b/.changeset/fair-mails-pretend.md
@@ -0,0 +1,10 @@
+---
+'@ai-sdk/ui-utils': patch
+'@ai-sdk/svelte': patch
+'@ai-sdk/react': patch
+'@ai-sdk/solid': patch
+'@ai-sdk/vue': patch
+'ai': patch
+---
+
+feat (ui): introduce message parts for useChat
diff --git a/content/docs/04-ai-sdk-ui/02-chatbot.mdx b/content/docs/04-ai-sdk-ui/02-chatbot.mdx
index c834ae306070..09c84aa3434a 100644
--- a/content/docs/04-ai-sdk-ui/02-chatbot.mdx
+++ b/content/docs/04-ai-sdk-ui/02-chatbot.mdx
@@ -65,6 +65,14 @@ export async function POST(req: Request) {
 }
 ```
 
+<Note>
+  The UI messages have a new `parts` property that contains the message parts.
+  We recommend rendering the messages using the `parts` property instead of the
+  `content` property. The parts property supports different message types,
+  including text, tool invocation, and tool result, and allows for more flexible
+  and complex chat UIs.
+</Note>
+
 In the `Page` component, the `useChat` hook will request to your AI provider endpoint whenever the user submits a message.
 The messages are then streamed back in real-time and displayed in the chat UI.
 
@@ -510,14 +518,23 @@ export async function POST(req: Request) {
 }
 ```
 
-On the client side, you can access the reasoning text with the `reasoning` property on the message object:
+On the client side, you can access the reasoning parts of the message object:
 
-```tsx filename="app/page.tsx" highlight="4"
-messages.map(m => (
-  <div key={m.id}>
-    {m.role === 'user' ? 'User: ' : 'AI: '}
-    {m.reasoning && <pre>{m.reasoning}</pre>}
-    {m.content}
+```tsx filename="app/page.tsx"
+messages.map(message => (
+  <div key={message.id}>
+    {message.role === 'user' ? 'User: ' : 'AI: '}
+    {message.parts.map((part, index) => {
+      // text parts:
+      if (part.type === 'text') {
+        return <div key={index}>{part.text}</div>;
+      }
+
+      // reasoning parts:
+      if (part.type === 'reasoning') {
+        return <pre key={index}>{part.reasoning}</pre>;
+      }
+    })}
   </div>
 ));
 ```
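Editorial aside (not part of the patch): the docs above move rendering from `message.content` to the new `message.parts` array, which is a discriminated union keyed on `part.type`. A minimal framework-free sketch of that narrowing follows; the type aliases mirror the `TextUIPart`, `ReasoningUIPart`, and `ToolInvocationUIPart` entries documented in the `use-chat` reference later in this patch, and `partsToPlainText` is a hypothetical helper, not something this changeset adds.

```ts
// Sketch only: local aliases that mirror the part shapes documented in this PR.
type TextUIPart = { type: 'text'; text: string };
type ReasoningUIPart = { type: 'reasoning'; reasoning: string };
type ToolInvocationUIPart = {
  type: 'tool-invocation';
  toolInvocation: {
    state: 'partial-call' | 'call' | 'result';
    toolCallId: string;
    toolName: string;
    args: any;
    result?: any;
  };
};
type MessagePart = TextUIPart | ReasoningUIPart | ToolInvocationUIPart;

// Hypothetical helper: flatten one message's parts into plain text,
// narrowing each part by its `type` discriminator.
function partsToPlainText(parts: MessagePart[]): string {
  return parts
    .map(part => {
      switch (part.type) {
        case 'text':
          return part.text;
        case 'reasoning':
          return part.reasoning;
        case 'tool-invocation':
          return `[${part.toolInvocation.toolName}: ${part.toolInvocation.state}]`;
      }
    })
    .join('');
}
```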
diff --git a/content/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx b/content/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx
index 6706763bbfa0..e42e19efb53e 100644
--- a/content/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx
+++ b/content/docs/04-ai-sdk-ui/03-chatbot-tool-usage.mdx
@@ -22,12 +22,12 @@ The flow is as follows:
 1. Client-side tools that should be automatically executed are handled with the `onToolCall` callback.
    You can return the tool result from the callback.
 1. Client-side tool that require user interactions can be displayed in the UI.
-   The tool calls and results are available in the `toolInvocations` property of the last assistant message.
+   The tool calls and results are available as tool invocation parts in the `parts` property of the last assistant message.
 1. When the user interaction is done, `addToolResult` can be used to add the tool result to the chat.
 1. When there are tool calls in the last assistant message and all tool results are available,
    the client sends the updated messages back to the server. This triggers another iteration of this flow.
 
-The tool call and tool executions are integrated into the assistant message as `toolInvocations`.
+The tool call and tool executions are integrated into the assistant message as tool invocation parts.
 A tool invocation is at first a tool call, and then it becomes a tool result when the tool is executed.
 The tool result contains all information about the tool call as well as the result of the tool execution.
@@ -61,7 +61,7 @@ export async function POST(req: Request) {
   const { messages } = await req.json();
 
   const result = streamText({
-    model: openai('gpt-4-turbo'),
+    model: openai('gpt-4o'),
     messages,
     tools: {
       // server-side tool with execute function:
@@ -98,7 +98,8 @@ export async function POST(req: Request) {
 ### Client-side page
 
 The client-side page uses the `useChat` hook to create a chatbot application with real-time message streaming.
-Tool invocations are displayed in the chat UI.
+Tool invocations are displayed in the chat UI as tool invocation parts.
+Please make sure to render the messages using the `parts` property of the message.
 
 There are three things worth mentioning:
@@ -117,7 +118,7 @@ There are three things worth mentioning:
 'use client';
 
 import { ToolInvocation } from 'ai';
-import { Message, useChat } from 'ai/react';
+import { useChat } from 'ai/react';
 
 export default function Chat() {
   const { messages, input, handleInputChange, handleSubmit, addToolResult } =
@@ -140,43 +141,110 @@ export default function Chat() {
   return (
     <>
-      {messages?.map((m: Message) => (
-        <div key={m.id}>
-          {m.role}:
-          {m.content}
-          {m.toolInvocations?.map((toolInvocation: ToolInvocation) => {
-            const toolCallId = toolInvocation.toolCallId;
-            const addResult = (result: string) =>
-              addToolResult({ toolCallId, result });
-
-            // render confirmation tool (client-side tool with user interaction)
-            if (toolInvocation.toolName === 'askForConfirmation') {
-              return (
-                <div key={toolCallId}>
-                  {toolInvocation.args.message}
-                  <div>
-                    {'result' in toolInvocation ? (
-                      <b>{toolInvocation.result}</b>
-                    ) : (
-                      <>
-                        <button onClick={() => addResult('Yes')}>Yes</button>
-                        <button onClick={() => addResult('No')}>No</button>
-                      </>
-                    )}
-                  </div>
-                </div>
-              );
+      {messages?.map(message => (
+        <div key={message.id}>
+          {`${message.role}: `}
+          {message.parts.map(part => {
+            switch (part.type) {
+              // render text parts as simple text:
+              case 'text':
+                return part.text;
+
+              // for tool invocations, distinguish between the tools and the state:
+              case 'tool-invocation': {
+                const callId = part.toolInvocation.toolCallId;
+
+                switch (part.toolInvocation.toolName) {
+                  case 'askForConfirmation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            {part.toolInvocation.args.message}
+                            <div>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'Yes, confirmed.' })}>Yes</button>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'No, denied' })}>No</button>
+                            </div>
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location access allowed:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
+
+                  case 'getLocation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting location...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location: {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
+
+                  case 'getWeatherInformation': {
+                    switch (part.toolInvocation.state) {
+                      // example of pre-rendering streaming tool calls:
+                      case 'partial-call':
+                        return (
+                          <pre key={callId}>
+                            {JSON.stringify(part.toolInvocation, null, 2)}
+                          </pre>
+                        );
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting weather information for{' '}
+                            {part.toolInvocation.args.city}...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Weather in {part.toolInvocation.args.city}:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
+                }
+              }
             }
-
-            // other tools:
-            return 'result' in toolInvocation ? (
-              <div key={toolCallId}>
-                Tool call {`${toolInvocation.toolName}: `}
-                {toolInvocation.result}
-              </div>
-            ) : (
-              <div key={toolCallId}>Calling {toolInvocation.toolName}...</div>
-            );
           })}
         </div>
@@ -210,7 +278,7 @@ export async function POST(req: Request) {
 When the flag is enabled, partial tool calls will be streamed as part of the data stream.
 They are available through the `useChat` hook.
-The `toolInvocations` property of assistant messages will also contain partial tool calls.
+The tool invocation parts of assistant messages will also contain partial tool calls.
 You can use the `state` property of the tool invocation to render the correct UI.
 
 ```tsx filename='app/page.tsx' highlight="9,10"
@@ -218,16 +286,18 @@ export default function Chat() {
   // ...
   return (
     <>
-      {messages?.map((m: Message) => (
-        <div key={m.id}>
-          {m.toolInvocations?.map((toolInvocation: ToolInvocation) => {
-            switch (toolInvocation.state) {
-              case 'partial-call':
-                return <>render partial tool call</>;
-              case 'call':
-                return <>render full tool call</>;
-              case 'result':
-                return <>render tool result</>;
+      {messages?.map(message => (
+        <div key={message.id}>
+          {message.parts.map(part => {
+            if (part.type === 'tool-invocation') {
+              switch (part.toolInvocation.state) {
+                case 'partial-call':
+                  return <>render partial tool call</>;
+                case 'call':
+                  return <>render full tool call</>;
+                case 'result':
+                  return <>render tool result</>;
+              }
             }
           })}
         </div>
       ))}
@@ -251,7 +321,7 @@ export async function POST(req: Request) {
   const { messages } = await req.json();
 
   const result = streamText({
-    model: openai('gpt-4-turbo'),
+    model: openai('gpt-4o'),
     messages,
     tools: {
       getWeatherInformation: {
diff --git a/content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx b/content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx
index df8166141dc1..2aba6cdd7e46 100644
--- a/content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx
+++ b/content/docs/07-reference/02-ai-sdk-ui/01-use-chat.mdx
@@ -211,11 +211,11 @@ Allows you to easily create a conversational user interface for your chatbot app
   content={[
     {
       name: 'messages',
-      type: 'Message[]',
+      type: 'UIMessage[]',
      description: 'The current array of chat messages.',
       properties: [
         {
-          type: 'Message',
+          type: 'UIMessage',
           parameters: [
             {
               name: 'id',
@@ -227,17 +227,6 @@ Allows you to easily create a conversational user interface for your chatbot app
               type: "'system' | 'user' | 'assistant' | 'data'",
               description: 'The role of the message.',
             },
-            {
-              name: 'content',
-              type: 'string',
-              description: 'The content of the message.',
-            },
-            {
-              name: 'reasoning',
-              type: 'string',
-              isOptional: true,
-              description: 'The reasoning of the message.',
-            },
             {
               name: 'createdAt',
               type: 'Date',
@@ -245,16 +234,9 @@ Allows you to easily create a conversational user interface for your chatbot app
               description: 'The creation date of the message.',
             },
             {
-              name: 'name',
+              name: 'content',
               type: 'string',
-              isOptional: true,
-              description: 'The name of the message.',
-            },
-            {
-              name: 'data',
-              type: 'JSONValue',
-              isOptional: true,
-              description: 'Additional data sent along with the message.',
+              description: 'The content of the message.',
             },
             {
               name: 'annotations',
@@ -264,98 +246,146 @@ Allows you to easily create a conversational user interface for your chatbot app
                 'Additional annotations sent along with the message.',
             },
             {
-              name: 'toolInvocations',
-              type: 'Array<ToolInvocation>',
-              isOptional: true,
+              name: 'parts',
+              type: 'Array<TextUIPart | ReasoningUIPart | ToolInvocationUIPart>',
               description:
-                'An array of tool invocations that are associated with the (assistant) message.',
+                'An array of message parts that are associated with the message.',
               properties: [
                 {
-                  type: 'ToolInvocation',
+                  type: 'TextUIPart',
+                  description: 'A text part of the message.',
                   parameters: [
                     {
-                      name: 'state',
-                      type: "'partial-call'",
-                      description: 'The state of the tool call when it was partially created.',
-                    },
-                    {
-                      name: 'toolCallId',
-                      type: 'string',
-                      description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
+                      name: 'type',
+                      type: '"text"',
                     },
                     {
-                      name: 'toolName',
+                      name: 'text',
                       type: 'string',
-                      description: 'Name of the tool that is being called.',
-                    },
-                    {
-                      name: 'args',
-                      type: 'any',
-                      description: 'Partial arguments of the tool call. This is a JSON-serializable object.',
+                      description: 'The text content of the part.',
                     },
                   ],
                 },
                 {
-                  type: 'ToolInvocation',
+                  type: 'ReasoningUIPart',
+                  description: 'A reasoning part of the message.',
                   parameters: [
                     {
-                      name: 'state',
-                      type: "'call'",
-                      description: 'The state of the tool call when it was fully created.',
+                      name: 'type',
+                      type: '"reasoning"',
                     },
                     {
-                      name: 'toolCallId',
+                      name: 'reasoning',
                       type: 'string',
-                      description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
-                    },
-                    {
-                      name: 'toolName',
-                      type: 'string',
-                      description: 'Name of the tool that is being called.',
-                    },
-                    {
-                      name: 'args',
-                      type: 'any',
-                      description: 'Arguments of the tool call. This is a JSON-serializable object that matches the tools input schema.',
+                      description: 'The reasoning content of the part.',
                     },
                   ],
                 },
                 {
-                  type: 'ToolInvocation',
+                  type: 'ToolInvocationUIPart',
+                  description: 'A tool invocation part of the message.',
                   parameters: [
                     {
-                      name: 'state',
-                      type: "'result'",
-                      description: 'The state of the tool call when the result is available.',
-                    },
-                    {
-                      name: 'toolCallId',
-                      type: 'string',
-                      description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
-                    },
-                    {
-                      name: 'toolName',
-                      type: 'string',
-                      description: 'Name of the tool that is being called.',
-                    },
-                    {
-                      name: 'args',
-                      type: 'any',
-                      description: 'Arguments of the tool call. This is a JSON-serializable object that matches the tools input schema.',
+                      name: 'type',
+                      type: '"tool-invocation"',
                     },
                     {
-                      name: 'result',
-                      type: 'any',
-                      description: 'The result of the tool call.',
+                      name: 'toolInvocation',
+                      type: 'ToolInvocation',
+                      properties: [
+                        {
+                          type: 'ToolInvocation',
+                          parameters: [
+                            {
+                              name: 'state',
+                              type: "'partial-call'",
+                              description: 'The state of the tool call when it was partially created.',
+                            },
+                            {
+                              name: 'toolCallId',
+                              type: 'string',
+                              description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
+                            },
+                            {
+                              name: 'toolName',
+                              type: 'string',
+                              description: 'Name of the tool that is being called.',
+                            },
+                            {
+                              name: 'args',
+                              type: 'any',
+                              description: 'Partial arguments of the tool call. This is a JSON-serializable object.',
+                            },
+                          ],
+                        },
+                        {
+                          type: 'ToolInvocation',
+                          parameters: [
+                            {
+                              name: 'state',
+                              type: "'call'",
+                              description: 'The state of the tool call when it was fully created.',
+                            },
+                            {
+                              name: 'toolCallId',
+                              type: 'string',
+                              description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
+                            },
+                            {
+                              name: 'toolName',
+                              type: 'string',
+                              description: 'Name of the tool that is being called.',
+                            },
+                            {
+                              name: 'args',
+                              type: 'any',
+                              description: 'Arguments of the tool call. This is a JSON-serializable object that matches the tools input schema.',
+                            },
+                          ],
+                        },
+                        {
+                          type: 'ToolInvocation',
+                          parameters: [
+                            {
+                              name: 'state',
+                              type: "'result'",
+                              description: 'The state of the tool call when the result is available.',
+                            },
+                            {
+                              name: 'toolCallId',
+                              type: 'string',
+                              description: 'ID of the tool call. This ID is used to match the tool call with the tool result.',
+                            },
+                            {
+                              name: 'toolName',
+                              type: 'string',
+                              description: 'Name of the tool that is being called.',
+                            },
+                            {
+                              name: 'args',
+                              type: 'any',
+                              description: 'Arguments of the tool call. This is a JSON-serializable object that matches the tools input schema.',
+                            },
+                            {
+                              name: 'result',
+                              type: 'any',
+                              description: 'The result of the tool call.',
+                            },
+                          ],
+                        },
+                      ],
                     },
                   ],
                 },
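Editorial aside (not part of the patch): the reference entries above describe a tool invocation as a value that moves through the `partial-call`, `call`, and `result` states, and the example apps below advance a client-side tool from `call` to `result` with `addToolResult({ toolCallId, result })`. A hedged sketch of that state handling in plain TypeScript; the three type aliases and `describeInvocation` are illustrative names mirroring the documented fields, not exports of the `ai` package:

```ts
// Sketch only: the three documented tool invocation states as a union on `state`.
type PartialToolCall = { state: 'partial-call'; toolCallId: string; toolName: string; args: any };
type FullToolCall = { state: 'call'; toolCallId: string; toolName: string; args: any };
type ToolResult = { state: 'result'; toolCallId: string; toolName: string; args: any; result: any };
type ToolInvocation = PartialToolCall | FullToolCall | ToolResult;

// Hypothetical render helper: pick a label per state, with an exhaustiveness check
// so adding a new state later becomes a compile error.
function describeInvocation(invocation: ToolInvocation): string {
  switch (invocation.state) {
    case 'partial-call':
    case 'call':
      return `Calling ${invocation.toolName}...`;
    case 'result':
      return `${invocation.toolName} returned ${JSON.stringify(invocation.result)}`;
    default: {
      const exhaustive: never = invocation;
      return exhaustive;
    }
  }
}
```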
diff --git a/examples/next-openai/app/api/use-chat-tools/route.ts b/examples/next-openai/app/api/use-chat-tools/route.ts
index 51f5f40e933e..086d9d9d6fbc 100644
--- a/examples/next-openai/app/api/use-chat-tools/route.ts
+++ b/examples/next-openai/app/api/use-chat-tools/route.ts
@@ -1,3 +1,4 @@
+import { anthropic } from '@ai-sdk/anthropic';
 import { openai } from '@ai-sdk/openai';
 import { streamText, tool } from 'ai';
 import { z } from 'zod';
@@ -10,6 +11,7 @@ export async function POST(req: Request) {
 
   const result = streamText({
     model: openai('gpt-4o'),
+    // model: anthropic('claude-3-5-sonnet-latest'),
     messages,
     toolCallStreaming: true,
     maxSteps: 5, // multi-steps for server-side tools
diff --git a/examples/next-openai/app/use-chat-persistence-single-message-tools/[id]/chat.tsx b/examples/next-openai/app/use-chat-persistence-single-message-tools/[id]/chat.tsx
index 0ce45d642879..0078f3233915 100644
--- a/examples/next-openai/app/use-chat-persistence-single-message-tools/[id]/chat.tsx
+++ b/examples/next-openai/app/use-chat-persistence-single-message-tools/[id]/chat.tsx
@@ -1,6 +1,6 @@
 'use client';
 
-import { createIdGenerator, ToolInvocation } from 'ai';
+import { createIdGenerator } from 'ai';
 import { Message, useChat } from 'ai/react';
 
 export default function Chat({
@@ -37,79 +37,105 @@ export default function Chat({
   return (
     <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
-      {messages?.map((m: Message) => (
-        <div key={m.id}>
-          {`${m.role}: `}
-          {m.toolInvocations?.map((toolInvocation: ToolInvocation) => {
-            const toolCallId = toolInvocation.toolCallId;
+      {messages?.map(message => (
+        <div key={message.id}>
+          {`${message.role}: `}
+          {message.parts.map(part => {
+            switch (part.type) {
+              case 'text':
+                return part.text;
+              case 'tool-invocation': {
+                const callId = part.toolInvocation.toolCallId;
 
-            // example of pre-rendering streaming tool calls
-            if (toolInvocation.state === 'partial-call') {
-              return (
-                <pre key={toolCallId}>
-                  {JSON.stringify(toolInvocation, null, 2)}
-                </pre>
-              );
-            }
+                switch (part.toolInvocation.toolName) {
+                  case 'askForConfirmation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            {part.toolInvocation.args.message}
+                            <div>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'Yes, confirmed.' })}>Yes</button>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'No, denied' })}>No</button>
+                            </div>
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location access allowed:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                  }
 
-            // render confirmation tool (client-side tool with user interaction)
-            if (toolInvocation.toolName === 'askForConfirmation') {
-              return (
-                <div key={toolCallId}>
-                  {toolInvocation.args.message}
-                  <div>
-                    {'result' in toolInvocation ? (
-                      <b>{toolInvocation.result}</b>
-                    ) : (
-                      <>
-                        <button onClick={() => addToolResult({ toolCallId, result: 'Yes' })}>Yes</button>
-                        <button onClick={() => addToolResult({ toolCallId, result: 'No' })}>No</button>
-                      </>
-                    )}
-                  </div>
-                </div>
-              );
-            }
+                  case 'getLocation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting location...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location: {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                  }
 
-            // other tools:
-            return 'result' in toolInvocation ? (
-              <div key={toolCallId}>
-                Tool call {`${toolInvocation.toolName}: `}
-                {toolInvocation.result}
-              </div>
-            ) : (
-              <div key={toolCallId}>
-                Calling {toolInvocation.toolName}...
-              </div>
-            );
-          })}{' '}
-          {m.annotations && (
-            <pre>
-              {JSON.stringify(m.annotations, null, 2)}
-            </pre>
-          )}
-          {m.content}
-        </div>
+                  case 'getWeatherInformation': {
+                    switch (part.toolInvocation.state) {
+                      // example of pre-rendering streaming tool calls:
+                      case 'partial-call':
+                        return (
+                          <pre key={callId}>
+                            {JSON.stringify(part.toolInvocation, null, 2)}
+                          </pre>
+                        );
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting weather information for{' '}
+                            {part.toolInvocation.args.city}...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Weather in {part.toolInvocation.args.city}:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                  }
+                }
+              }
+            }
+          })}
+        </div>
       ))}
diff --git a/examples/next-openai/app/use-chat-reasoning/page.tsx b/examples/next-openai/app/use-chat-reasoning/page.tsx
index d40a6a174c8a..0f6dddbc3594 100644
--- a/examples/next-openai/app/use-chat-reasoning/page.tsx
+++ b/examples/next-openai/app/use-chat-reasoning/page.tsx
@@ -18,15 +18,25 @@ export default function Chat() {
   return (
     <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
-      {messages.map(m => (
-        <div key={m.id} className="whitespace-pre-wrap">
-          {m.role === 'user' ? 'User: ' : 'AI: '}
-          {m.reasoning && (
-            <pre>
-              {m.reasoning}
-            </pre>
-          )}
-          {m.content}
+      {messages.map(message => (
+        <div key={message.id} className="whitespace-pre-wrap">
+          {message.role === 'user' ? 'User: ' : 'AI: '}
+          {message.parts.map((part, index) => {
+            if (part.type === 'text') {
+              return <div key={index}>{part.text}</div>;
+            }
+
+            if (part.type === 'reasoning') {
+              return (
+                <pre key={index}>
+                  {part.reasoning}
+                </pre>
+              );
+            }
+          })}
         </div>
       ))}
diff --git a/examples/next-openai/app/use-chat-tools/page.tsx b/examples/next-openai/app/use-chat-tools/page.tsx
index 4df23edffe37..4c1f12cb646e 100644
--- a/examples/next-openai/app/use-chat-tools/page.tsx
+++ b/examples/next-openai/app/use-chat-tools/page.tsx
@@ -1,7 +1,6 @@
 'use client';
 
-import { ToolInvocation } from 'ai';
-import { Message, useChat } from 'ai/react';
+import { useChat } from 'ai/react';
 
 export default function Chat() {
   const { messages, input, handleInputChange, handleSubmit, addToolResult } =
@@ -25,74 +24,108 @@ export default function Chat() {
   return (
     <div className="flex flex-col w-full max-w-md py-24 mx-auto stretch">
-      {messages?.map((m: Message) => (
-        <div key={m.id}>
-          {`${m.role}: `}
-          {m.toolInvocations?.map((toolInvocation: ToolInvocation) => {
-            const toolCallId = toolInvocation.toolCallId;
+      {messages?.map(message => (
+        <div key={message.id}>
+          {`${message.role}: `}
+          {message.parts.map(part => {
+            switch (part.type) {
+              case 'text':
+                return part.text;
+              case 'tool-invocation': {
+                const callId = part.toolInvocation.toolCallId;
 
-            // example of pre-rendering streaming tool calls
-            if (toolInvocation.state === 'partial-call') {
-              return (
-                <pre key={toolCallId}>
-                  {JSON.stringify(toolInvocation, null, 2)}
-                </pre>
-              );
-            }
+                switch (part.toolInvocation.toolName) {
+                  case 'askForConfirmation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            {part.toolInvocation.args.message}
+                            <div>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'Yes, confirmed.' })}>Yes</button>
+                              <button onClick={() => addToolResult({ toolCallId: callId, result: 'No, denied' })}>No</button>
+                            </div>
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location access allowed:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
 
-            // render confirmation tool (client-side tool with user interaction)
-            if (toolInvocation.toolName === 'askForConfirmation') {
-              return (
-                <div key={toolCallId}>
-                  {toolInvocation.args.message}
-                  <div>
-                    {'result' in toolInvocation ? (
-                      <b>{toolInvocation.result}</b>
-                    ) : (
-                      <>
-                        <button onClick={() => addToolResult({ toolCallId, result: 'Yes' })}>Yes</button>
-                        <button onClick={() => addToolResult({ toolCallId, result: 'No' })}>No</button>
-                      </>
-                    )}
-                  </div>
-                </div>
-              );
-            }
+                  case 'getLocation': {
+                    switch (part.toolInvocation.state) {
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting location...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Location: {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
 
-            // other tools:
-            return 'result' in toolInvocation ? (
-              <div key={toolCallId}>
-                Tool call {`${toolInvocation.toolName}: `}
-                {toolInvocation.result}
-              </div>
-            ) : (
-              <div key={toolCallId}>
-                Calling {toolInvocation.toolName}...
-              </div>
-            );
+                  case 'getWeatherInformation': {
+                    switch (part.toolInvocation.state) {
+                      // example of pre-rendering streaming tool calls:
+                      case 'partial-call':
+                        return (
+                          <pre key={callId}>
+                            {JSON.stringify(part.toolInvocation, null, 2)}
+                          </pre>
+                        );
+                      case 'call':
+                        return (
+                          <div key={callId}>
+                            Getting weather information for{' '}
+                            {part.toolInvocation.args.city}...
+                          </div>
+                        );
+                      case 'result':
+                        return (
+                          <div key={callId}>
+                            Weather in {part.toolInvocation.args.city}:{' '}
+                            {part.toolInvocation.result}
+                          </div>
+                        );
+                    }
+                    break;
+                  }
+                }
+              }
+            }
           })}
-          {m.content}
         </div>
       ))}
diff --git a/examples/next-openai/package.json b/examples/next-openai/package.json
index 2d07f6dcf469..0182b0c28e00 100644
--- a/examples/next-openai/package.json
+++ b/examples/next-openai/package.json
@@ -9,6 +9,7 @@
     "lint": "next lint"
   },
   "dependencies": {
+    "@ai-sdk/anthropic": "1.1.6",
    "@ai-sdk/deepseek": "0.1.8",
     "@ai-sdk/openai": "1.1.9",
     "@ai-sdk/ui-utils": "1.1.9",
diff --git a/examples/nuxt-openai/pages/use-chat-tools/index.vue b/examples/nuxt-openai/pages/use-chat-tools/index.vue
index 8e0ee045ae65..bd6374a60e94 100644
--- a/examples/nuxt-openai/pages/use-chat-tools/index.vue
+++ b/examples/nuxt-openai/pages/use-chat-tools/index.vue
@@ -13,65 +13,116 @@ const { input, handleSubmit, messages, addToolResult } = useChat({
     }
   },
 });
+
+const messageList = computed(() => messages.value); // computed property for type inference
 </script>
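Editorial aside (not part of the patch): the Nuxt example is truncated here, but the added `messageList` computed exists so the template iterates over a single, already-typed array instead of reading the composable's ref inline. A hedged TypeScript sketch of the same idea, assuming `messages` from the Vue `useChat` composable behaves as a `Ref` of messages with `parts` (the `useMessageList` wrapper and the local types are illustrative only):

```ts
// Sketch only: wrap a Ref of chat messages in a computed, mirroring
// `const messageList = computed(() => messages.value);` from the diff above.
import { computed, type Ref } from 'vue';

type MessagePart =
  | { type: 'text'; text: string }
  | { type: 'reasoning'; reasoning: string }
  | {
      type: 'tool-invocation';
      toolInvocation: {
        state: 'partial-call' | 'call' | 'result';
        toolCallId: string;
        toolName: string;
        args: any;
        result?: any;
      };
    };

type ChatMessage = { id: string; role: string; parts: MessagePart[] };

export function useMessageList(messages: Ref<ChatMessage[]>) {
  // The template can now loop over `messageList.value` with full type inference
  // on each message's `parts`.
  return computed(() => messages.value);
}
```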