# assistant-stream
Guide for assistant-stream package and streaming protocols. Use when implementing streaming backends, custom protocols, or debugging stream issues.
Install the skill: npx skill4agent add assistant-ui/skills streaming

assistant-stream — which path do you need?

Using Vercel AI SDK?
├─ Yes → toUIMessageStreamResponse() (no assistant-stream needed)
└─ No → assistant-stream for custom backends

npm install assistant-stream

import { createAssistantStreamResponse } from "assistant-stream";
export async function POST(req: Request) {
return createAssistantStreamResponse(async (stream) => {
stream.appendText("Hello ");
stream.appendText("world!");
// Tool call example
const tool = stream.addToolCallPart({ toolCallId: "1", toolName: "get_weather" });
tool.argsText.append('{"city":"NYC"}');
tool.argsText.close();
tool.setResponse({ result: { temperature: 22 } });
stream.close();
});
}useLocalRuntimeChatModelRunResultimport { useLocalRuntime } from "@assistant-ui/react";
const runtime = useLocalRuntime({
  model: {
    // Custom ChatModelAdapter: streams newline-delimited text chunks from
    // the backend. Each `yield` REPLACES the in-progress message content
    // (ChatModelRunResult semantics), so we must yield the full accumulated
    // text, not just the newest chunk.
    async *run({ messages, abortSignal }) {
      const response = await fetch("/api/chat", {
        method: "POST",
        body: JSON.stringify({ messages }),
        signal: abortSignal,
      });
      if (!response.ok || !response.body) {
        throw new Error(`chat request failed: ${response.status}`);
      }

      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let buffer = ""; // carries a partial line across reads
      let text = "";   // full text accumulated so far (yielded as a snapshot)

      for (;;) {
        const { done, value } = await reader.read();
        if (done) break;
        // stream:true keeps multi-byte sequences split across reads intact
        buffer += decoder.decode(value, { stream: true });
        const parts = buffer.split("\n");
        buffer = parts.pop() ?? ""; // last element may be an incomplete line
        for (const chunk of parts.filter(Boolean)) {
          text += chunk;
          yield { content: [{ type: "text", text }] };
        }
      }

      // Flush the decoder and emit any trailing line that had no newline —
      // the original example silently dropped this remainder.
      buffer += decoder.decode();
      if (buffer) {
        text += buffer;
        yield { content: [{ type: "text", text }] };
      }
    },
  },
});

import { AssistantStream, DataStreamDecoder } from "assistant-stream";
const stream = AssistantStream.fromResponse(response, new DataStreamDecoder());
for await (const event of stream) {
console.log("Event:", JSON.stringify(event, null, 2));
}part-startpart.type"text" | "reasoning" | "tool-call" | "source" | "file"text-deltaresultstep-startstep-finishmessage-finisherrortext/event-streamaddToolCallParttoolCallIdtoolNamemakeAssistantToolUItext-delta