Loading...
Loading...
Expert integration patterns for Claude API and TypeScript SDK covering Messages API, streaming responses, tool use, error handling, token optimization, and production-ready implementations for building AI-powered applications
# Install the skill
npx skill4agent add manutej/luxor-claude-marketplace claude-sdk-integration-patterns
# Install the SDK
npm install @anthropic-ai/sdk
# Or with yarn
yarn add @anthropic-ai/sdk
import Anthropic from '@anthropic-ai/sdk';
// Initialize the client; the API key is read from the environment,
// never hard-coded.
const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
});

// Simplest possible call: one user turn, non-streaming.
const message = await anthropic.messages.create({
  model: 'claude-sonnet-4-5-20250929',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello, Claude!' }],
});
console.log(message.content);
const messages = [
{ role: 'user', content: 'What is TypeScript?' },
{ role: 'assistant', content: 'TypeScript is a typed superset of JavaScript...' },
{ role: 'user', content: 'Give me an example' },
];
const response = await anthropic.messages.create({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages,
});const response = await anthropic.messages.create({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
system: 'You are a helpful Python programming assistant. Provide concise, tested code examples.',
messages: [
{ role: 'user', content: 'How do I read a CSV file?' }
],
});const stream = await anthropic.messages.create({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Write a story' }],
stream: true,
});
for await (const event of stream) {
if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
process.stdout.write(event.delta.text);
}
}const stream = anthropic.messages.stream({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Explain quantum computing' }],
})
.on('text', (text) => {
console.log(text);
})
.on('message', (message) => {
console.log('Complete message:', message);
})
.on('error', (error) => {
console.error('Stream error:', error);
});
const finalMessage = await stream.finalMessage();const stream = anthropic.messages.stream({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Long task...' }],
});
// Abort after timeout
setTimeout(() => stream.abort(), 5000);
try {
await stream.done();
} catch (error) {
if (error instanceof Anthropic.APIUserAbortError) {
console.log('Stream aborted by user');
}
}import { betaZodTool } from '@anthropic-ai/sdk/helpers/zod';
import { z } from 'zod';

// Tool definition: the Zod schema doubles as runtime validation for the
// model-supplied tool input.
const weatherTool = betaZodTool({
  name: 'get_weather',
  description: 'Get current weather for a location',
  inputSchema: z.object({
    location: z.string(),
    unit: z.enum(['celsius', 'fahrenheit']).default('fahrenheit'),
  }),
  run: async (input) => {
    // Call weather API
    return `Weather in ${input.location}: 72°F, sunny`;
  },
});
const finalMessage = await anthropic.beta.messages.toolRunner({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1000,
messages: [
{ role: 'user', content: 'What\'s the weather in San Francisco?' }
],
tools: [weatherTool],
});
console.log(finalMessage.content);const runner = anthropic.beta.messages.toolRunner({
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1000,
messages: [{ role: 'user', content: 'Check weather and calculate travel time' }],
tools: [weatherTool, travelTimeTool],
stream: true,
});
for await (const messageStream of runner) {
for await (const event of messageStream) {
console.log('Event:', event);
}
console.log('Message:', await messageStream.finalMessage());
}async function createMessage(prompt: string) {
// NOTE(review): the original retried a 429 by recursively calling
// createMessage(prompt) with no depth limit — a persistent rate limit
// would recurse forever. A bounded loop caps the retries while keeping
// the same per-error handling.
const maxRateLimitRetries = 5;
for (let attempt = 0; attempt <= maxRateLimitRetries; attempt++) {
  try {
    return await anthropic.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      max_tokens: 1024,
      messages: [{ role: 'user', content: prompt }],
    });
  } catch (error) {
    if (error instanceof Anthropic.APIError) {
      console.error('API Error:', {
        status: error.status,
        name: error.name,
        message: error.message,
        headers: error.headers,
      });
      // Handle specific errors
      if (error.status === 429 && attempt < maxRateLimitRetries) {
        // Rate limit - wait, then retry (bounded)
        console.log('Rate limited, waiting...');
        await new Promise(resolve => setTimeout(resolve, 60000));
        continue;
      } else if (error.status === 401) {
        throw new Error('Invalid API key');
      } else if (error.status === 400) {
        throw new Error(`Bad request: ${error.message}`);
      }
    }
    // Unknown / non-retryable error: propagate unchanged.
    throw error;
  }
}
// Unreachable (last iteration always returns or throws); keeps the
// return type sound under noImplicitReturns.
throw new Error('Rate limit retries exhausted');
}
async function createWithRetry(
params: Anthropic.MessageCreateParams,
maxRetries = 3,
baseDelay = 1000
): Promise<Anthropic.Message> {
for (let attempt = 0; attempt < maxRetries; attempt++) {
try {
return await anthropic.messages.create(params);
} catch (error) {
if (error instanceof Anthropic.APIError && error.status === 429) {
// Rate limit - exponential backoff
const delay = baseDelay * Math.pow(2, attempt);
console.log(`Retry attempt ${attempt + 1} after ${delay}ms`);
await new Promise(resolve => setTimeout(resolve, delay));
} else {
throw error;
}
}
}
throw new Error('Max retries exceeded');
}import { encode } from 'gpt-tokenizer';
function estimateTokens(text: string): number {
  // Approximate token count (Claude uses similar tokenization to GPT)
  return encode(text).length;
}

function optimizePrompt(userMessage: string, maxTokens: number = 1024): string {
  // Within budget: pass through untouched.
  if (estimateTokens(userMessage) <= maxTokens) {
    return userMessage;
  }
  // Truncate or summarize
  return userMessage.slice(0, maxTokens * 4); // Rough char estimate
}
interface ConversationMessage {
role: 'user' | 'assistant';
content: string;
tokens?: number;
}
class ConversationManager {
private messages: ConversationMessage[] = [];
private maxContextTokens = 100000;
addMessage(role: 'user' | 'assistant', content: string) {
const tokens = estimateTokens(content);
this.messages.push({ role, content, tokens });
this.pruneIfNeeded();
}
private pruneIfNeeded() {
const totalTokens = this.messages.reduce((sum, msg) => sum + (msg.tokens || 0), 0);
if (totalTokens > this.maxContextTokens) {
// Remove oldest messages (keep system prompt if present)
this.messages = this.messages.slice(-10); // Keep last 10 messages
}
}
getMessages() {
return this.messages.map(({ role, content }) => ({ role, content }));
}
}const batchResult = await anthropic.messages.batches.create({
requests: [
{
custom_id: 'request-1',
params: {
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Summarize this article...' }],
},
},
{
custom_id: 'request-2',
params: {
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Translate this text...' }],
},
},
],
});
// Poll for results
const batch = await anthropic.messages.batches.retrieve(batchResult.id);const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY,
timeout: 60000, // 60 seconds
maxRetries: 3,
});const message = await anthropic.messages.create(
{
model: 'claude-sonnet-4-5-20250929',
max_tokens: 1024,
messages: [{ role: 'user', content: 'Hello' }],
},
{
headers: {
'anthropic-beta': 'max-tokens-3-5-sonnet-2024-07-15',
},
}
);// ❌ Bad
const anthropic = new Anthropic({ apiKey: 'sk-ant-...' });
// ✅ Good
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });
# .env file
ANTHROPIC_API_KEY=sk-ant-your-key-here
model: 'claude-sonnet-4-5-20250929' // Latest
sk-ant-