File: reading-ui-message-streams.md | Updated: 11/15/2025
Menu
v5 (Latest)
AI SDK 5.x
Reading UIMessage Streams
Copy markdown
======================================================================================================================
UIMessage streams are useful outside of traditional chat use cases. You can consume them for terminal UIs, custom stream processing on the client, or React Server Components (RSC).
The readUIMessageStream helper transforms a stream of UIMessageChunk objects into an AsyncIterableStream of UIMessage objects, allowing you to process messages as they're being constructed.
import { openai } from '@ai-sdk/openai';
import { readUIMessageStream, streamText } from 'ai';

/**
 * Minimal example: stream a text generation and observe each intermediate
 * UIMessage snapshot as the stream of UIMessageChunks is assembled.
 */
async function main() {
  const result = streamText({
    model: openai('gpt-4o'),
    prompt: 'Write a short story about a robot.',
  });

  // Each iteration yields the message in its current (partial) state.
  for await (const uiMessage of readUIMessageStream({
    stream: result.toUIMessageStream(),
  })) {
    console.log('Current message state:', uiMessage);
  }
}
Handle streaming responses that include tool calls:
import { openai } from '@ai-sdk/openai';import { readUIMessageStream, streamText, tool } from 'ai';import { z } from 'zod';
async function handleToolCalls() { const result = streamText({ model: openai('gpt-4o'), tools: { weather: tool({ description: 'Get the weather in a location', inputSchema: z.object({ location: z.string().describe('The location to get the weather for'), }), execute: ({ location }) => ({ location, temperature: 72 + Math.floor(Math.random() * 21) - 10, }), }), }, prompt: 'What is the weather in Tokyo?', });
for await (const uiMessage of readUIMessageStream({ stream: result.toUIMessageStream(), })) { // Handle different part types uiMessage.parts.forEach(part => { switch (part.type) { case 'text': console.log('Text:', part.text); break; case 'tool-call': console.log('Tool called:', part.toolName, 'with args:', part.args); break; case 'tool-result': console.log('Tool result:', part.result); break; } }); }}
Resume streaming from a previous message state:
import { readUIMessageStream, streamText } from 'ai';
async function resumeConversation(lastMessage: UIMessage) { const result = streamText({ model: openai('gpt-4o'), messages: [ { role: 'user', content: 'Continue our previous conversation.' }, ], });
// Resume from the last message for await (const uiMessage of readUIMessageStream({ stream: result.toUIMessageStream(), message: lastMessage, // Resume from this message })) { console.log('Resumed message:', uiMessage); }}
On this page
Deploy and Scale AI Apps with Vercel.
Vercel delivers the infrastructure and developer experience you need to ship reliable AI-powered applications at scale.
Trusted by industry leaders: