Loading...
Loading...
Complete guide for OpenAI's Assistants API v2: stateful conversational AI with built-in tools (Code Interpreter, File Search, Function Calling), vector stores for RAG (up to 10,000 files), thread/run lifecycle management, and streaming patterns. Both Node.js SDK and fetch approaches. ⚠️ DEPRECATION NOTICE: OpenAI plans to sunset Assistants API in H1 2026 in favor of Responses API. This skill remains valuable for existing apps and migration planning. Use when: building stateful chatbots with OpenAI, implementing RAG with vector stores, executing Python code with Code Interpreter, using file search for document Q&A, managing conversation threads, streaming assistant responses, or encountering errors like "thread already has active run", vector store indexing delays, run polling timeouts, or file upload issues. Keywords: openai assistants, assistants api, openai threads, openai runs, code interpreter assistant, file search openai, vector store openai, openai rag, assistant streaming, thread persistence, stateful chatbot, thread already has active run, run status polling, vector store error
npx skill4agent add jackspace/claudeskillz openai-assistants

See references/migration-to-responses.md for the Responses API migration guide.

npm install openai@6.7.0

export OPENAI_API_KEY="sk-..."

import OpenAI from 'openai';
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

// 1. Create an assistant
const assistant = await openai.beta.assistants.create({
  name: "Math Tutor",
  instructions: "You are a personal math tutor. Write and run code to answer math questions.",
  tools: [{ type: "code_interpreter" }],
  model: "gpt-4o",
});

// 2. Create a thread
const thread = await openai.beta.threads.create();

// 3. Add a message to the thread
await openai.beta.threads.messages.create(thread.id, {
  role: "user",
  content: "I need to solve the equation `3x + 11 = 14`. Can you help me?",
});

// 4. Create a run
const run = await openai.beta.threads.runs.create(thread.id, {
  assistant_id: assistant.id,
});

// 5. Poll for completion — loop only while the run is in a NON-terminal state.
// Checking `status !== 'completed'` alone spins forever when the run ends as
// failed / expired / cancelled, since those never become 'completed'.
let runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
while (['queued', 'in_progress', 'cancelling'].includes(runStatus.status)) {
  await new Promise(resolve => setTimeout(resolve, 1000));
  runStatus = await openai.beta.threads.runs.retrieve(thread.id, run.id);
}
if (runStatus.status !== 'completed') {
  throw new Error(`Run ended with status ${runStatus.status}: ${runStatus.last_error?.message ?? 'unknown'}`);
}

// 6. Retrieve messages (newest first by default)
const messages = await openai.beta.threads.messages.list(thread.id);
console.log(messages.data[0].content[0].text.value);

// 1. Create assistant
// Shared headers — Assistants v2 requires the 'OpenAI-Beta' header on every
// call; defining them once removes the triplicated literal object.
const v2Headers = {
  'Authorization': `Bearer ${env.OPENAI_API_KEY}`,
  'Content-Type': 'application/json',
  'OpenAI-Beta': 'assistants=v2',
};

// 1. Create assistant. Note: fetch does NOT throw on HTTP error statuses,
// so each response is checked explicitly before its body is consumed.
const assistant = await fetch('https://api.openai.com/v1/assistants', {
  method: 'POST',
  headers: v2Headers,
  body: JSON.stringify({
    name: "Math Tutor",
    instructions: "You are a helpful math tutor.",
    model: "gpt-4o",
  }),
});
if (!assistant.ok) {
  throw new Error(`Assistant create failed: ${assistant.status}`);
}
const assistantData = await assistant.json();

// 2. Create thread (empty body — the API accepts a bare POST)
const thread = await fetch('https://api.openai.com/v1/threads', {
  method: 'POST',
  headers: v2Headers,
});
if (!thread.ok) {
  throw new Error(`Thread create failed: ${thread.status}`);
}
const threadData = await thread.json();

// 3. Add message and create run — `additional_messages` appends the user
// message in the same call, avoiding a separate /messages request.
const run = await fetch(`https://api.openai.com/v1/threads/${threadData.id}/runs`, {
  method: 'POST',
  headers: v2Headers,
  body: JSON.stringify({
    assistant_id: assistantData.id,
    additional_messages: [{
      role: "user",
      content: "What is 3x + 11 = 14?",
    }],
  }),
});
if (!run.ok) {
  throw new Error(`Run create failed: ${run.status}`);
}
// Poll for completion...const assistant = await openai.beta.assistants.create({
name: "Data Analyst",
instructions: "You are a data analyst. Use code interpreter to analyze data and create visualizations.",
model: "gpt-4o",
tools: [
{ type: "code_interpreter" },
{ type: "file_search" },
],
tool_resources: {
file_search: {
vector_store_ids: ["vs_abc123"],
},
},
metadata: {
department: "analytics",
version: "1.0",
},
});modelinstructionsnamedescriptiontoolstool_resourcestemperaturetop_presponse_formatmetadataconst assistant = await openai.beta.assistants.retrieve("asst_abc123");const updatedAssistant = await openai.beta.assistants.update("asst_abc123", {
instructions: "Updated instructions",
tools: [{ type: "code_interpreter" }, { type: "file_search" }],
});await openai.beta.assistants.del("asst_abc123");const assistants = await openai.beta.assistants.list({
limit: 20,
order: "desc",
});// Empty thread
const thread = await openai.beta.threads.create();
// Thread with initial messages
const thread = await openai.beta.threads.create({
messages: [
{
role: "user",
content: "Hello! I need help with Python.",
metadata: { source: "web" },
},
],
metadata: {
user_id: "user_123",
session_id: "session_456",
},
});const thread = await openai.beta.threads.retrieve("thread_abc123");const thread = await openai.beta.threads.update("thread_abc123", {
metadata: {
user_id: "user_123",
last_active: new Date().toISOString(),
},
});await openai.beta.threads.del("thread_abc123");const message = await openai.beta.threads.messages.create("thread_abc123", {
role: "user",
content: "Can you analyze this data?",
attachments: [
{
file_id: "file_abc123",
tools: [{ type: "code_interpreter" }],
},
],
metadata: {
timestamp: new Date().toISOString(),
},
});rolecontentattachmentsmetadataconst message = await openai.beta.threads.messages.retrieve(
"thread_abc123",
"msg_abc123"
);const messages = await openai.beta.threads.messages.list("thread_abc123", {
limit: 20,
order: "desc", // "asc" or "desc"
});
// Iterate through messages
for (const message of messages.data) {
console.log(`${message.role}: ${message.content[0].text.value}`);
}const message = await openai.beta.threads.messages.update(
"thread_abc123",
"msg_abc123",
{
metadata: {
edited: "true",
edit_timestamp: new Date().toISOString(),
},
}
);await openai.beta.threads.messages.del("thread_abc123", "msg_abc123");const run = await openai.beta.threads.runs.create("thread_abc123", {
assistant_id: "asst_abc123",
instructions: "Please address the user as Jane Doe.",
additional_messages: [
{
role: "user",
content: "Can you help me with this?",
},
],
});assistant_idinstructionsadditional_messagestoolsmetadatatemperaturetop_pmax_prompt_tokensmax_completion_tokensconst run = await openai.beta.threads.runs.retrieve(
"thread_abc123",
"run_abc123"
);
console.log(run.status); // queued, in_progress, requires_action, completed, failed, etc.| State | Description |
|---|---|
| `queued` | Run is waiting to start |
| `in_progress` | Run is executing |
| `requires_action` | Function calling needs your input |
| `cancelling` | Cancellation in progress |
| `cancelled` | Run was cancelled |
| `failed` | Run failed (check `last_error`) |
| `completed` | Run finished successfully |
| `expired` | Run expired (max 10 minutes) |
// Poll a run until it leaves every non-terminal state, then classify the
// outcome. Returns the run for 'completed' and 'requires_action' (caller
// handles tool calls); throws for all failure outcomes.
async function pollRunCompletion(threadId: string, runId: string) {
  let run = await openai.beta.threads.runs.retrieve(threadId, runId);
  while (['queued', 'in_progress', 'cancelling'].includes(run.status)) {
    await new Promise(resolve => setTimeout(resolve, 1000)); // Wait 1 second
    run = await openai.beta.threads.runs.retrieve(threadId, runId);
  }
  if (run.status === 'failed') {
    throw new Error(`Run failed: ${run.last_error?.message}`);
  }
  if (run.status === 'expired' || run.status === 'cancelled') {
    // The original returned these silently as if they succeeded; surface
    // them so callers don't read stale/absent messages.
    throw new Error(`Run ended with status: ${run.status}`);
  }
  // 'completed' or 'requires_action' (function calling — see that section)
  return run;
}
const run = await openai.beta.threads.runs.create(threadId, { assistant_id: assistantId });
const completedRun = await pollRunCompletion(threadId, run.id);const run = await openai.beta.threads.runs.cancel("thread_abc123", "run_abc123");statuscancelledconst runs = await openai.beta.threads.runs.list("thread_abc123", {
limit: 10,
order: "desc",
});const stream = await openai.beta.threads.runs.stream("thread_abc123", {
assistant_id: "asst_abc123",
});
for await (const event of stream) {
if (event.event === 'thread.message.delta') {
const delta = event.data.delta.content?.[0]?.text?.value;
if (delta) {
process.stdout.write(delta);
}
}
}| Event | Description |
|---|---|
| `thread.run.created` | Run was created |
| `thread.run.in_progress` | Run started |
| `thread.run.step.created` | Step created (tool call, message creation) |
| `thread.run.step.delta` | Step progress update |
| `thread.message.created` | Message created |
| `thread.message.delta` | Message content streaming |
| `thread.message.completed` | Message finished |
| `thread.run.completed` | Run finished |
| `thread.run.failed` | Run failed |
| `thread.run.requires_action` | Function calling needed |
// Stream a run's events, writing text deltas and Code Interpreter input to
// stdout as they arrive. (Closing brace of this function follows below.)
async function streamAssistantResponse(threadId: string, assistantId: string) {
  const stream = await openai.beta.threads.runs.stream(threadId, {
    assistant_id: assistantId,
  });
  for await (const event of stream) {
    switch (event.event) {
      case 'thread.run.created':
        // '\n' (single backslash): the original '\\n' printed a literal
        // backslash-n instead of a newline.
        console.log('\nRun started...');
        break;
      case 'thread.message.delta': {
        // Braces scope the const to this case (no-case-declarations).
        const delta = event.data.delta.content?.[0];
        if (delta?.type === 'text' && delta.text?.value) {
          process.stdout.write(delta.text.value);
        }
        break;
      }
      case 'thread.run.step.delta': {
        const stepDetails = event.data.delta.step_details;
        if (stepDetails?.type === 'tool_calls') {
          const codeInterpreter = stepDetails.tool_calls?.[0]?.code_interpreter;
          if (codeInterpreter?.input) {
            console.log('\nExecuting code:', codeInterpreter.input);
          }
        }
        break;
      }
      case 'thread.run.completed':
        console.log('\n\nRun completed!');
        break;
      case 'thread.run.failed':
        console.error('\nRun failed:', event.data.last_error);
        break;
      case 'thread.run.requires_action':
        // Handle function calling
        console.log('\nFunction calling required');
        break;
    }
  }
}const assistant = await openai.beta.assistants.create({
name: "Data Analyst",
instructions: "You are a data analyst. Use Python to analyze data and create visualizations.",
model: "gpt-4o",
tools: [{ type: "code_interpreter" }],
});
// Upload a file
const file = await openai.files.create({
file: fs.createReadStream("sales_data.csv"),
purpose: "assistants",
});
// Create thread with file
const thread = await openai.beta.threads.create({
messages: [{
role: "user",
content: "Analyze this sales data and create a visualization.",
attachments: [{
file_id: file.id,
tools: [{ type: "code_interpreter" }],
}],
}],
});
// Run
const run = await openai.beta.threads.runs.create(thread.id, {
assistant_id: assistant.id,
});
// Poll for completion and retrieve outputsconst messages = await openai.beta.threads.messages.list(thread.id);
const message = messages.data[0];
for (const content of message.content) {
if (content.type === 'image_file') {
const fileId = content.image_file.file_id;
const fileContent = await openai.files.content(fileId);
// Save or process file
}
}// 1. Create vector store
const vectorStore = await openai.beta.vectorStores.create({
name: "Product Documentation",
metadata: { category: "docs" },
});
// 2. Upload files to vector store
const file = await openai.files.create({
file: fs.createReadStream("product_guide.pdf"),
purpose: "assistants",
});
await openai.beta.vectorStores.files.create(vectorStore.id, {
file_id: file.id,
});
// 3. Create assistant with file search
const assistant = await openai.beta.assistants.create({
name: "Product Support",
instructions: "Use file search to answer questions about our products.",
model: "gpt-4o",
tools: [{ type: "file_search" }],
tool_resources: {
file_search: {
vector_store_ids: [vectorStore.id],
},
},
});
// 4. Create thread and run
const thread = await openai.beta.threads.create({
messages: [{
role: "user",
content: "How do I install the product?",
}],
});
const run = await openai.beta.threads.runs.create(thread.id, {
assistant_id: assistant.id,
});completedconst assistant = await openai.beta.assistants.create({
name: "Weather Assistant",
instructions: "You help users get weather information.",
model: "gpt-4o",
tools: [{
type: "function",
function: {
name: "get_weather",
description: "Get the current weather for a location",
parameters: {
type: "object",
properties: {
location: {
type: "string",
description: "City name, e.g., 'San Francisco'",
},
unit: {
type: "string",
enum: ["celsius", "fahrenheit"],
description: "Temperature unit",
},
},
required: ["location"],
},
},
}],
});
// Create thread and run
const thread = await openai.beta.threads.create({
messages: [{
role: "user",
content: "What's the weather in San Francisco?",
}],
});
let run = await openai.beta.threads.runs.create(thread.id, {
assistant_id: assistant.id,
});
// Poll until requires_action
while (run.status === 'in_progress' || run.status === 'queued') {
await new Promise(resolve => setTimeout(resolve, 1000));
run = await openai.beta.threads.runs.retrieve(thread.id, run.id);
}
// The run paused because the assistant requested tool calls; execute each
// one and submit the outputs so the run can resume.
if (run.status === 'requires_action') {
const toolCalls = run.required_action.submit_tool_outputs.tool_calls;
const toolOutputs = [];
for (const toolCall of toolCalls) {
if (toolCall.function.name === 'get_weather') {
// The model supplies arguments as a JSON-encoded string.
const args = JSON.parse(toolCall.function.arguments);
// Call your actual weather API
const weather = await getWeatherAPI(args.location, args.unit);
toolOutputs.push({
// Each output must echo the tool_call_id it answers.
tool_call_id: toolCall.id,
output: JSON.stringify(weather),
});
}
}
// Submit tool outputs
run = await openai.beta.threads.runs.submitToolOutputs(thread.id, run.id, {
tool_outputs: toolOutputs,
});
// Continue polling...
}const vectorStore = await openai.beta.vectorStores.create({
name: "Legal Documents",
metadata: {
department: "legal",
category: "contracts",
},
expires_after: {
anchor: "last_active_at",
days: 7, // Auto-delete 7 days after last use
},
});const file = await openai.files.create({
file: fs.createReadStream("contract.pdf"),
purpose: "assistants",
});
await openai.beta.vectorStores.files.create(vectorStore.id, {
file_id: file.id,
});const fileBatch = await openai.beta.vectorStores.fileBatches.create(vectorStore.id, {
file_ids: ["file_abc123", "file_def456", "file_ghi789"],
});
// Poll for batch completion
let batch = await openai.beta.vectorStores.fileBatches.retrieve(vectorStore.id, fileBatch.id);
while (batch.status === 'in_progress') {
await new Promise(resolve => setTimeout(resolve, 1000));
batch = await openai.beta.vectorStores.fileBatches.retrieve(vectorStore.id, fileBatch.id);
}const vectorStore = await openai.beta.vectorStores.retrieve("vs_abc123");
console.log(vectorStore.status); // "in_progress", "completed", "failed"
console.log(vectorStore.file_counts); // { in_progress: 0, completed: 50, failed: 0 }status: "completed"const stores = await openai.beta.vectorStores.list({
limit: 20,
order: "desc",
});const vectorStore = await openai.beta.vectorStores.update("vs_abc123", {
name: "Updated Name",
metadata: { updated: "true" },
});await openai.beta.vectorStores.del("vs_abc123");import fs from 'fs';
const file = await openai.files.create({
file: fs.createReadStream("document.pdf"),
purpose: "assistants",
});
console.log(file.id); // file_abc123const file = await openai.files.retrieve("file_abc123");const content = await openai.files.content("file_abc123");
// Returns binary contentawait openai.files.del("file_abc123");const files = await openai.files.list({
purpose: "assistants",
async function getOrCreateUserThread(userId: string): Promise<string> {
  // One persistent thread per user: look up the stored ID, create lazily.
  const existing = await db.getThreadIdForUser(userId);
  if (existing) {
    return existing;
  }
  const created = await openai.beta.threads.create({
    metadata: { user_id: userId },
  });
  await db.saveThreadIdForUser(userId, created.id);
  return created.id;
}

// Guard for the "thread already has an active run" 400 error: a thread can
// only have one run in flight at a time, so inspect the newest run first.
async function ensureNoActiveRun(threadId: string) {
  const recent = await openai.beta.threads.runs.list(threadId, {
    limit: 1,
    order: "desc",
  });
  const newest = recent.data[0];
  if (newest && ['queued', 'in_progress', 'cancelling'].includes(newest.status)) {
    throw new Error('Thread already has an active run. Wait or cancel first.');
  }
}

// Before creating new run
await ensureNoActiveRun(threadId);
const run = await openai.beta.threads.runs.create(threadId, { assistant_id });async function cleanupOldThreads(maxAgeHours = 24) {
// NOTE(review): a public threads.list endpoint is not documented by the
// OpenAI API — verify this call works with your SDK version; thread IDs are
// normally tracked in your own database (see getOrCreateUserThread).
const threads = await openai.beta.threads.list({ limit: 100 });
for (const thread of threads.data) {
// created_at is a unix timestamp in seconds; convert to ms for Date.
const createdAt = new Date(thread.created_at * 1000);
const ageHours = (Date.now() - createdAt.getTime()) / (1000 * 60 * 60);
if (ageHours > maxAgeHours) {
await openai.beta.threads.del(thread.id);
}
}
}Error: 400 Can't add messages to thread_xxx while a run run_xxx is active.// Wait for run to complete or cancel it
// `run` is reassigned in the loop below, so it must be `let`; the original
// `const` made the re-fetch a runtime TypeError.
let run = await openai.beta.threads.runs.retrieve(threadId, runId);
if (['queued', 'in_progress'].includes(run.status)) {
  await openai.beta.threads.runs.cancel(threadId, runId);
  // Wait for ANY terminal state: a run that finishes before the cancel
  // lands ends up 'completed'/'failed' and never becomes 'cancelled', so
  // waiting strictly for 'cancelled' could loop forever.
  while (!['cancelled', 'completed', 'failed', 'expired'].includes(run.status)) {
    await new Promise(resolve => setTimeout(resolve, 500));
    run = await openai.beta.threads.runs.retrieve(threadId, runId);
  }
}

// Poll a run but enforce a wall-clock ceiling; on expiry the run is
// cancelled so it stops consuming tokens in the background.
async function pollWithTimeout(threadId: string, runId: string, maxSeconds = 300) {
  const startedAt = Date.now();
  while (true) {
    const current = await openai.beta.threads.runs.retrieve(threadId, runId);
    if (!['queued', 'in_progress'].includes(current.status)) {
      return current;
    }
    const elapsedSeconds = (Date.now() - startedAt) / 1000;
    if (elapsedSeconds > maxSeconds) {
      await openai.beta.threads.runs.cancel(threadId, runId);
      throw new Error('Run exceeded timeout');
    }
    await new Promise(resolve => setTimeout(resolve, 1000));
  }
}async function waitForVectorStore(vectorStoreId: string) {
let store = await openai.beta.vectorStores.retrieve(vectorStoreId);
while (store.status === 'in_progress') {
await new Promise(resolve => setTimeout(resolve, 2000));
store = await openai.beta.vectorStores.retrieve(vectorStoreId);
}
if (store.status === 'failed') {
throw new Error('Vector store indexing failed');
}
return store;
}

// File formats accepted by each built-in tool.
const SUPPORTED_FORMATS = {
  code_interpreter: ['.csv', '.json', '.pdf', '.txt', '.py', '.js', '.xlsx'],
  file_search: ['.pdf', '.docx', '.txt', '.md', '.html'],
};

// Throws when `filename`'s extension is not supported by `tool`.
// Typing `tool` as keyof typeof SUPPORTED_FORMATS fixes the implicit-any
// index error under strict mode and catches tool-name typos at compile time.
function validateFile(filename: string, tool: keyof typeof SUPPORTED_FORMATS) {
  const dot = filename.lastIndexOf('.');
  // A dot-less name previously sliced from index -1 (treated as 0), so the
  // whole filename was compared as an "extension"; reject it explicitly.
  const ext = dot === -1 ? '' : filename.substring(dot).toLowerCase();
  if (!SUPPORTED_FORMATS[tool].includes(ext)) {
    throw new Error(`Unsupported file format for ${tool}: ${ext}`);
  }
}references/top-errors.md// Creates new assistant on every request!
const assistant = await openai.beta.assistants.create({ ... });// Create once, store ID, reuse
const ASSISTANT_ID = process.env.ASSISTANT_ID || await createAssistant();
async function createAssistant() {
const assistant = await openai.beta.assistants.create({ ... });
console.log('Save this ID:', assistant.id);
return assistant.id;
}

// Create a run, retrying on 429 rate limits (linear backoff) and on the
// "thread already has an active run" conflict; other errors propagate.
async function createRunWithRetry(threadId: string, assistantId: string, maxRetries = 3) {
  for (let i = 0; i < maxRetries; i++) {
    try {
      return await openai.beta.threads.runs.create(threadId, {
        assistant_id: assistantId,
      });
    } catch (error) {
      // catch variables are `unknown` under strict mode — narrow before use
      // instead of reading `error.status` / `error.message` directly.
      const status = (error as { status?: number }).status;
      const message = error instanceof Error ? error.message : '';
      if (status === 429) {
        // Rate limit - wait and retry
        await new Promise(resolve => setTimeout(resolve, 2000 * (i + 1)));
        continue;
      }
      if (message.includes('active run')) {
        // Wait for active run to complete
        await new Promise(resolve => setTimeout(resolve, 5000));
        continue;
      }
      throw error; // Other errors
    }
  }
  throw new Error('Max retries exceeded');
}// Track usage
const run = await openai.beta.threads.runs.retrieve(threadId, runId);
console.log('Tokens used:', run.usage);
// { prompt_tokens: 150, completion_tokens: 200, total_tokens: 350 }
// Set limits
const run = await openai.beta.threads.runs.create(threadId, {
assistant_id: assistantId,
max_prompt_tokens: 1000,
max_completion_tokens: 500,
});// Delete old threads
// Delete a user's persisted thread on OpenAI and forget its ID locally.
async function cleanupUserThread(userId: string) {
const threadId = await db.getThreadIdForUser(userId);
if (threadId) {
await openai.beta.threads.del(threadId);
await db.deleteThreadIdForUser(userId);
}
}
// Delete unused vector stores
// Vector stores incur storage costs — remove any older than keepDays.
async function cleanupVectorStores(keepDays = 30) {
const stores = await openai.beta.vectorStores.list({ limit: 100 });
for (const store of stores.data) {
// created_at is unix seconds; compute the age in seconds, then days.
const ageSeconds = Date.now() / 1000 - store.created_at;
const ageDays = ageSeconds / (60 * 60 * 24);
if (ageDays > keepDays) {
await openai.beta.vectorStores.del(store.id);
}
}
}// Show progress in real-time
async function streamToUser(threadId: string, assistantId: string) {
  // Forward text deltas to the client the moment they arrive instead of
  // making the user wait for the whole run to finish.
  const stream = await openai.beta.threads.runs.stream(threadId, {
    assistant_id: assistantId,
  });
  for await (const chunk of stream) {
    if (chunk.event !== 'thread.message.delta') {
      continue;
    }
    const text = chunk.data.delta.content?.[0]?.text?.value;
    if (text) {
      // Send to user immediately
      sendToClient(text);
    }
  }
}references/migration-to-responses.mdretrievalfile_searchreferences/migration-from-v1.mdtemplates/basic-assistant.tstemplates/code-interpreter-assistant.tstemplates/file-search-assistant.tstemplates/function-calling-assistant.tstemplates/streaming-assistant.tsreferences/top-errors.mdreferences/thread-lifecycle.mdreferences/vector-stores.mdopenai-responsesopenai-api