Loading...
Loading...
Provides comprehensive guidance for building AI-powered Generative UI applications with the Thesys C1 API and GenUI SDK. Use it when developing interactive UI components, chat interfaces, dashboards, or any application that benefits from dynamically generated React interfaces from natural language prompts.
npx skill4agent add thesysdev/skills thesys-c1-genui<ThemeProvider>onErrornpm install @thesysai/genui-sdk@latest @crayonai/react-ui@latest @crayonai/react-core@latest @crayonai/stream@latest openai@latest next@latest react@^19.0.0 react-dom@^19.0.0npx create-c1-app my-app
cd my-app
npm run devgit clone https://github.com/thesysdev/template-c1-fastapi.git
cd template-c1-fastapi
pip install -r requirements.txt
uvicorn main:app --reload
# In another terminal for frontend:
npm install && npm run devexport THESYS_API_KEY=<your-api-key><C1Component>import { C1Component, ThemeProvider } from "@thesysai/genui-sdk";
import "@crayonai/react-ui/styles/index.css";
<ThemeProvider>
<C1Component
c1Response={response}
isStreaming={isLoading}
onAction={({ llmFriendlyMessage, humanFriendlyMessage }) => {
// Handle button clicks, form submissions
}}
updateMessage={(updatedResponse) => {
// Persist state changes to database
}}
/>
</ThemeProvider><C1Chat>import { C1Chat } from "@thesysai/genui-sdk";
import "@crayonai/react-ui/styles/index.css";
<C1Chat
apiUrl="/api/chat"
agentName="My Assistant"
logoUrl="/logo.png"
formFactor="full-page" // or "side-panel"
/>| Feature | | |
|---|---|---|
| Render C1 DSL | ✅ | ✅ |
| Streaming | ✅ | ✅ |
| Forms & Actions | ✅ | ✅ |
| Message History | DIY | ✅ Built-in |
| Thread Management | DIY | ✅ Built-in |
| Chat UI | ❌ | ✅ |
https://api.thesys.dev/v1/embed<C1Component>import OpenAI from "openai";
const client = new OpenAI({
baseURL: "https://api.thesys.dev/v1/embed",
apiKey: process.env.THESYS_API_KEY,
});
const response = await client.chat.completions.create({
model: "c1/anthropic/claude-sonnet-4/v-20251230",
messages: [
{ role: "system", content: "You are a helpful assistant." },
{ role: "user", content: "Show me a chart of sales data" }
],
stream: true,
});<thinking>Analyzing request...</thinking>
<content>
<!-- Interactive UI components -->
</content>
<artifact id="report-1">
<!-- Document content like slides/reports -->
</artifact>c1/anthropic/claude-sonnet-4/v-20251230c1/openai/gpt-5/v-20251230c1-exp/anthropic/claude-sonnet-4.5/v-20251230c1-exp/anthropic/claude-haiku-4.5/v-20251230c1/artifact/v-20251230npm install openai @crayonai/stream// app/api/chat/messageStore.ts
// OpenAI is used only in type position — use a type-only import so it is
// fully erased at compile time and never loaded at runtime.
import type OpenAI from "openai";

/** A chat message as stored, plus an optional locally-assigned id. */
export type DBMessage = OpenAI.Chat.ChatCompletionMessageParam & { id?: string };

// In-memory store keyed by thread id. NOTE(review): demo only — state is
// lost on server restart; swap for a real database in production.
const messagesStore: { [threadId: string]: DBMessage[] } = {};

/**
 * Returns a handle on one thread's message history.
 *
 * @param threadId - Identifier of the conversation thread.
 * @returns Helpers to append messages and to produce an OpenAI-compatible
 *          message list (local `id` fields stripped, as the API rejects
 *          unknown properties).
 */
export const getMessageStore = (threadId: string) => {
  // Lazily create this thread's history on first access; the `??=` keeps a
  // single checked lookup instead of repeated unchecked indexing.
  const messages = (messagesStore[threadId] ??= []);
  return {
    /** Append a message to this thread's history. */
    addMessage: (message: DBMessage) => {
      messages.push(message);
    },
    /** History with the local `id` removed, ready to send to the API. */
    getOpenAICompatibleMessageList: () => {
      return messages.map(({ id, ...rest }) => rest);
    },
  };
};
// app/api/chat/route.ts
import { NextRequest, NextResponse } from "next/server";
import OpenAI from "openai";
import { transformStream } from "@crayonai/stream";
import { type DBMessage, getMessageStore } from "./messageStore";

/**
 * Chat endpoint: appends the user's prompt to the thread history, streams a
 * C1 completion back as server-sent events, and persists the assistant's
 * full reply once the stream ends.
 *
 * Request body: { prompt, threadId, responseId } — `responseId` is the
 * client-chosen id under which the assistant reply is stored.
 */
export async function POST(req: NextRequest) {
  const { prompt, threadId, responseId } = (await req.json()) as {
    prompt: DBMessage;
    threadId: string;
    responseId: string;
  };
  const client = new OpenAI({
    baseURL: "https://api.thesys.dev/v1/embed",
    apiKey: process.env.THESYS_API_KEY,
  });
  const messageStore = getMessageStore(threadId);
  messageStore.addMessage(prompt);
  const llmStream = await client.chat.completions.create({
    model: "c1/anthropic/claude-sonnet-4/v-20251230",
    messages: messageStore.getOpenAICompatibleMessageList(),
    stream: true,
  });
  const responseStream = transformStream(
    llmStream,
    // Some chunks carry no content (e.g. role-only deltas or a trailing
    // usage chunk with empty `choices`); optional-chain so those yield
    // undefined instead of throwing mid-stream.
    (chunk) => chunk.choices[0]?.delta?.content,
    {
      onEnd: ({ accumulated }) => {
        // Drop empty fragments, join the rest, and persist the complete
        // assistant reply under the client-supplied responseId.
        const message = accumulated.filter(Boolean).join("");
        messageStore.addMessage({
          role: "assistant",
          content: message,
          id: responseId,
        });
      },
    }
  ) as ReadableStream;
  // Standard SSE headers; no-transform keeps proxies from buffering.
  return new NextResponse(responseStream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      Connection: "keep-alive",
    },
  });
}
const client = new OpenAI({
baseURL: "https://api.thesys.dev/v1/embed", // Change from OpenAI
apiKey: process.env.THESYS_API_KEY,
});
// Use existing tools and system prompts
const response = await client.beta.chat.completions.runTools({
model: "c1/anthropic/claude-sonnet-4/v-20251230",
messages: [...],
tools: existingTools,
stream: true,
});// 1. Get text from your LLM
const textResponse = await yourLLM.generate(prompt);
// 2. Visualize with C1
const uiResponse = await c1Client.chat.completions.create({
model: "c1/anthropic/claude-sonnet-4/v-20251230",
messages: [{ role: "user", content: textResponse }],
});const tools = [{
type: "function",
function: {
name: "generate_ui",
description: "Generate interactive UI for user",
parameters: {
type: "object",
properties: {
content: { type: "string", description: "Content to visualize" }
}
}
}
}];useThreadManageruseThreadListManagerc1_custom_actionsonActionuseC1State<ThemeProvider>