Loading...
Loading...
Build stateful chatbots with OpenAI Assistants API v2 - Code Interpreter, File Search (10k files), Function Calling. Prevents 10 documented errors, including vector store upload bugs, temperature parameter conflicts, and memory leaks. Deprecated (sunset August 2026); use openai-responses for new projects. Use when: maintaining legacy chatbots, implementing RAG with vector stores, or troubleshooting thread errors, vector store delays, or uploadAndPoll issues.
Install the skill: npx skill4agent add jezweb/claude-skills openai-assistants
See also: the openai-responses skill and references/migration-to-responses.md
Install the SDK: npm install openai@6.16.0

import OpenAI from 'openai';
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// 1. Create assistant
const assistant = await openai.beta.assistants.create({
  name: "Math Tutor",
  instructions: "You are a math tutor. Use code interpreter for calculations.",
  tools: [{ type: "code_interpreter" }],
  model: "gpt-5",
});

// 2. Create thread
const thread = await openai.beta.threads.create();

// 3. Add message
await openai.beta.threads.messages.create(thread.id, {
  role: "user",
  content: "Solve: 3x + 11 = 14",
});

// 4. Run assistant
const run = await openai.beta.threads.runs.create(thread.id, {
  assistant_id: assistant.id,
});

// 5. Poll for completion — wait for ANY terminal state, not just 'completed';
// checking only 'completed' loops forever when the run fails/cancels/expires.
let status = await openai.beta.threads.runs.retrieve(thread.id, run.id);
while (!['completed', 'failed', 'cancelled', 'expired', 'incomplete'].includes(status.status)) {
  await new Promise(r => setTimeout(r, 1000));
  status = await openai.beta.threads.runs.retrieve(thread.id, run.id);
}
if (status.status !== 'completed') {
  throw new Error(`Run ended with status: ${status.status}`);
}

// 6. Get response (messages.list returns newest first; guard the content type —
// the first block may be an image_file rather than text)
const messages = await openai.beta.threads.messages.list(thread.id);
const first = messages.data[0]?.content[0];
if (first?.type === 'text') console.log(first.text.value);

// Assistant configured with both tools and a vector store for file_search
// (renamed to assistant2 — the fused original redeclared `const assistant`)
const assistant2 = await openai.beta.assistants.create({
  model: "gpt-5",
  instructions: "System prompt (max 256k chars in v2)",
  tools: [{ type: "code_interpreter" }, { type: "file_search" }],
  tool_resources: { file_search: { vector_store_ids: ["vs_123"] } },
});
// Create thread with messages
// Create a thread seeded with an initial user message
const thread = await openai.beta.threads.create({
  messages: [{ role: "user", content: "Hello" }],
});

// Append a user message that attaches a file for code_interpreter to use
await openai.beta.threads.messages.create(thread.id, {
  role: "user",
  content: "Analyze this",
  attachments: [{ file_id: "file_123", tools: [{ type: "code_interpreter" }] }],
});

// Retrieve the thread's messages
const msgs = await openai.beta.threads.messages.list(thread.id);

// Create run with optional overrides
const run = await openai.beta.threads.runs.create(thread.id, {
assistant_id: "asst_123",
additional_messages: [{ role: "user", content: "Question" }],
max_prompt_tokens: 1000,
max_completion_tokens: 500,
});
// Poll until complete
let status = await openai.beta.threads.runs.retrieve(thread.id, run.id);
while (['queued', 'in_progress'].includes(status.status)) {
await new Promise(r => setTimeout(r, 1000));
status = await openai.beta.threads.runs.retrieve(thread.id, run.id);
}queuedin_progressrequires_actioncompletedfailedcancelledexpiredconst stream = await openai.beta.threads.runs.stream(thread.id, { assistant_id });
for await (const event of stream) {
if (event.event === 'thread.message.delta') {
process.stdout.write(event.data.delta.content?.[0]?.text?.value || '');
}
}thread.run.createdthread.message.deltathread.run.step.deltathread.run.completedthread.run.requires_action// Attach file to message
// Attach file to message — payload fragment for messages.create
// (the original showed this as a bare property line, which is invalid
// standalone syntax):
// attachments: [{ file_id: "file_123", tools: [{ type: "code_interpreter" }] }]

// Access files generated by code_interpreter (e.g. rendered charts).
// `message` is one element of messages.list(...).data.
for (const content of message.content) {
  if (content.type === 'image_file') {
    const fileContent = await openai.files.content(content.image_file.file_id);
  }
}
// Create vector store
const vs = await openai.beta.vectorStores.create({ name: "Docs" });
await openai.beta.vectorStores.files.create(vs.id, { file_id: "file_123" });

// Wait for indexing.
// NOTE(review): per this skill's own error list, the store can report
// 'completed' before individual files are searchable — prefer
// fileBatches.uploadAndPoll (or poll the individual file) for reliability.
let store = await openai.beta.vectorStores.retrieve(vs.id);
while (store.status === 'in_progress') {
  await new Promise(r => setTimeout(r, 2000));
  store = await openai.beta.vectorStores.retrieve(vs.id);
}

// Use in assistant — payload fragment for assistants.create:
// tool_resources: { file_search: { vector_store_ids: [vs.id] } }

// Function calling: the run pauses with status: 'requires_action'
if (run.status === 'requires_action') {
const toolCalls = run.required_action.submit_tool_outputs.tool_calls;
const outputs = toolCalls.map(tc => ({
tool_call_id: tc.id,
output: JSON.stringify(yourFunction(JSON.parse(tc.function.arguments))),
}));
run = await openai.beta.threads.runs.submitToolOutputs(thread.id, run.id, {
tool_outputs: outputs,
});
}Error: 400 Can't add messages to thread_xxx while a run run_xxx is active.await openai.beta.threads.runs.cancel(threadId, runId)Error: OpenAIError: Final run has not been receivedincompletetry {
const stream = await openai.beta.threads.runs.stream(thread.id, { assistant_id });
for await (const event of stream) {
if (event.event === 'thread.message.delta') {
process.stdout.write(event.data.delta.content?.[0]?.text?.value || '');
}
}
} catch (error) {
if (error.message?.includes('Final run has not been received')) {
// Run ended with 'incomplete' status - thread can continue
const run = await openai.beta.threads.runs.retrieve(thread.id, runId);
if (run.status === 'incomplete') {
// Handle: prompt user to continue, reduce max_completion_tokens, etc.
}
}
}vectorStores.retrieve()status === 'completed'Error: No 'files' provided to processuploadAndPoll{ files: [...] }// ✅ Correct
// uploadAndPoll expects an options object with a `files` array.
await openai.beta.vectorStores.fileBatches.uploadAndPoll(vectorStoreId, {
  files: fileStreams
});
// ❌ Wrong (shown in official docs) — commented out because running it throws
// "No 'files' provided to process":
// await openai.beta.vectorStores.fileBatches.uploadAndPoll(vectorStoreId, fileStreams);
// Error: Unsupported parameter: 'temperature' is not supported with this model
//   Reasoning models reject sampling params — clear them by setting null:
await openai.beta.assistants.update(assistantId, {
model: 'o3-mini',
reasoning_effort: 'medium',
temperature: null, // ✅ Must explicitly clear
top_p: null
});Error: Invalid 'batch_id': 'vs_...'. Expected an ID that begins with 'vsfb_'.uploadAndPoll// Option 1: Use createAndPoll after separate upload
const batch = await openai.vectorStores.fileBatches.createAndPoll(
  vectorStoreId,
  { file_ids: uploadedFileIds }
);
// Option 2: List batches to find the correct ID
const batches = await openai.vectorStores.fileBatches.list(vectorStoreId);
const batchId = batches.data[0].id; // starts with 'vsfb_'

// ❌ Files are shared across vector stores: this deletes the file from
// VS_A, VS_B, AND VS_C. Commented out because it is live destructive code:
// await openai.vectorStores.files.delete('VS_A', 'file-xxx');
// Prefer vectorStores.fileBatches.uploadAndPoll for uploads.

async function createRunSafely(threadId: string, assistantId: string) {
// Check for active runs first
const runs = await openai.beta.threads.runs.list(threadId, { limit: 1 });
const activeRun = runs.data.find(r =>
['queued', 'in_progress', 'requires_action'].includes(r.status)
);
if (activeRun) {
try {
await openai.beta.threads.runs.cancel(threadId, activeRun.id);
// Wait for cancellation to complete
let run = await openai.beta.threads.runs.retrieve(threadId, activeRun.id);
while (run.status === 'cancelling') {
await new Promise(r => setTimeout(r, 500));
run = await openai.beta.threads.runs.retrieve(threadId, activeRun.id);
}
} catch (error) {
// Ignore "already completed" errors - run finished naturally
if (!error.message?.includes('completed')) throw error;
}
}
return openai.beta.threads.runs.create(threadId, { assistant_id: assistantId });
}references/top-errors.mdreferences/migration-to-responses.mdretrievalfile_searchreferences/migration-from-v1.mdtemplates/basic-assistant.tscode-interpreter-assistant.tsfile-search-assistant.tsfunction-calling-assistant.tsstreaming-assistant.tsreferences/top-errors.mdthread-lifecycle.mdvector-stores.mdmigration-to-responses.mdmigration-from-v1.mdopenai-responsesopenai-api