`streamUI` creates a streamable UI from LLMs in React Server Components.
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod'; // required: the tool inputSchema below uses z.object / z.string

/**
 * Streams a UI response for a weather query from a Server Action.
 *
 * The model may call the `getWeather` tool; its async generator first
 * yields a loading placeholder, then returns the final weather card.
 * Returns the streamable React node (`result.value`) to render on the client.
 */
export async function generateUI() {
  const result = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'Get the weather for San Francisco',
    // Plain model text is wrapped in a div as it streams in.
    text: ({ content }) => <div>{content}</div>,
    tools: {
      getWeather: {
        description: 'Get the weather for a location',
        inputSchema: z.object({
          location: z.string(),
        }),
        // Async generator: each yield replaces the currently shown UI;
        // the final `return` ends the stream for this tool call.
        generate: async function* ({ location }) {
          yield <div>Loading weather for {location}...</div>;
          const weather = await fetchWeather(location);
          return <div>Weather in {location}: {weather.temp}°F</div>;
        },
      },
    },
  });
  return result.value;
}
Parameters
The language model to use.
A simple text prompt. You can either use prompt or messages but not both.
A list of messages. You can either use prompt or messages but not both.
A system message that will be part of the prompt.
tools
Record<string, RenderTool>
The tools that the model can call. The model needs to support calling tools.
The tool choice strategy. Default: 'auto'.
Renderer for text content. Receives an object with content, delta, and done properties.
Initial React node to display before any content is generated.
Maximum number of tokens to generate.
Temperature setting. The value is passed through to the provider.
Nucleus sampling. The value is passed through to the provider.
Only sample from the top K options for each subsequent token.
Presence penalty setting.
Frequency penalty setting.
The seed (integer) to use for random sampling.
Maximum number of retries. Set to 0 to disable retries.
An optional abort signal that can be used to cancel the call.
Additional HTTP headers to be sent with the request.
Additional provider-specific options.
onFinish
(event: OnFinishEvent) => void
Callback that is called when the LLM response and all tool executions (for tools with a generate function) are finished.
Description of what the tool does.
The schema that defines the input shape for the tool.
The render function for the tool. Can be:
- A function that returns a React node
- A generator function that yields intermediate React nodes
- An async generator function that yields intermediate React nodes
RenderText
A function that renders text content:
type RenderText = (args: {
content: string; // The full text content from the model so far
delta: string; // The new appended text since the last call
done: boolean; // Whether the model is done generating
}) => ReactNode | Generator<ReactNode> | AsyncGenerator<ReactNode>;
Returns
The React node value that can be returned from a Server Action and rendered on the client.
The underlying stream from the language model.
response
LanguageModelV3StreamResult['response']
Response metadata from the language model.
request
LanguageModelV3StreamResult['request']
Request metadata.
Examples
Basic text rendering
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
export async function generateGreeting() {
  // Stream a short greeting; show a blinking cursor until generation completes.
  const ui = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'Say hello',
    text: ({ content, done }) => {
      // Hide the cursor once the model signals it is done.
      const cursor = done ? null : <span className="cursor">▋</span>;
      return (
        <div>
          {content}
          {cursor}
        </div>
      );
    },
  });
  return ui.value;
}
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';
export async function generateWeather() {
  // Stream a UI response; the model may invoke the getWeather tool.
  const ui = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'What is the weather in San Francisco?',
    // Plain text responses are wrapped in a div.
    text: ({ content }) => <div>{content}</div>,
    tools: {
      getWeather: {
        description: 'Get the weather for a location',
        inputSchema: z.object({
          location: z.string(),
        }),
        // Plain async function: renders once, with the final result only.
        generate: async ({ location }) => {
          const weather = await fetchWeather(location);
          const card = (
            <div className="weather-card">
              <h3>{location}</h3>
              <p>Temperature: {weather.temp}°F</p>
              <p>Condition: {weather.condition}</p>
            </div>
          );
          return card;
        },
      },
    },
  });
  return ui.value;
}
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';
export async function generateStock() {
  // Stream a stock quote UI; the tool progressively refines what is shown.
  const ui = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'Show me the stock price for AAPL',
    text: ({ content }) => <div>{content}</div>,
    tools: {
      getStockPrice: {
        description: 'Get the stock price for a symbol',
        inputSchema: z.object({
          symbol: z.string(),
        }),
        // Async generator: every yield replaces the UI shown so far,
        // and the return value becomes the final UI.
        generate: async function* ({ symbol }) {
          // Placeholder while the quote is fetched.
          yield <div>Loading {symbol}...</div>;
          const price = await fetchStockPrice(symbol);
          // Quick quote before the history finishes loading.
          yield <div>{symbol}: ${price.current}</div>;
          const history = await fetchStockHistory(symbol);
          return (
            <div className="stock-card">
              <h3>{symbol}</h3>
              <p>Current: ${price.current}</p>
              <p>Change: {price.change}%</p>
              <div className="chart">
                {/* Render history chart */}
              </div>
            </div>
          );
        },
      },
    },
  });
  return ui.value;
}
import { streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { z } from 'zod';
export async function generateDashboard() {
  // One prompt, two tools: the model may call either (or both) to build the dashboard.
  const ui = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'Show me the weather and stock price',
    text: ({ content }) => <div>{content}</div>,
    tools: {
      getWeather: {
        description: 'Get weather information',
        inputSchema: z.object({ location: z.string() }),
        // Method shorthand; resolves to a WeatherCard for the location.
        async generate({ location }) {
          const weather = await fetchWeather(location);
          return <WeatherCard {...weather} />;
        },
      },
      getStockPrice: {
        description: 'Get stock price',
        inputSchema: z.object({ symbol: z.string() }),
        // Resolves to a StockCard for the symbol.
        async generate({ symbol }) {
          const stock = await fetchStock(symbol);
          return <StockCard {...stock} />;
        },
      },
    },
  });
  return ui.value;
}