Anthropic SDK Integration
Complete guide for integrating AgentRouter with the Anthropic SDK in Python and TypeScript
This guide provides comprehensive instructions for integrating with AgentRouter using the Anthropic SDK. AgentRouter exposes Claude models through an Anthropic-compatible API.
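Because the API is Anthropic-compatible, you can sanity-check a new deployment with a plain HTTP request before wiring up the SDK. The snippet below is an illustrative sketch, not part of the official setup: it assumes your AgentRouter instance mirrors Anthropic's POST /v1/messages route and request headers (x-api-key, anthropic-version), and it uses placeholder values for the base URL and key.
import requests  # third-party HTTP client: pip install requests
BASE_URL = "https://your-agentrouter.com"  # placeholder AgentRouter instance
API_KEY = "sk-ar-your-api-key"  # placeholder AgentRouter API key
# Assumption: AgentRouter forwards the standard Anthropic Messages endpoint.
response = requests.post(
    f"{BASE_URL}/v1/messages",
    headers={
        "x-api-key": API_KEY,
        "anthropic-version": "2023-06-01",
        "content-type": "application/json",
    },
    json={
        "model": "claude-3-5-sonnet-20241022",
        "max_tokens": 64,
        "messages": [{"role": "user", "content": "ping"}],
    },
    timeout=30,
)
response.raise_for_status()
print(response.json()["content"][0]["text"])
If this round trip succeeds, the SDK clients configured below will work against the same base URL and key.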
Installation
Python
pip install anthropic
TypeScript
npm install @anthropic-ai/sdk
# or
yarn add @anthropic-ai/sdk
# or
pnpm add @anthropic-ai/sdk
Quick Start
Python Setup
from anthropic import Anthropic
client = Anthropic(
    api_key="sk-ar-your-api-key",  # Your AgentRouter API key
    base_url="https://your-agentrouter.com"  # Your AgentRouter instance
)
TypeScript Setup
import Anthropic from '@anthropic-ai/sdk';
const client = new Anthropic({
  apiKey: 'sk-ar-your-api-key',  // Your AgentRouter API key
  baseURL: 'https://your-agentrouter.com'  // Your AgentRouter instance
});
Supported Models
| Model | Description | Context Window |
|---|---|---|
| claude-3-5-sonnet-20241022 | Claude 3.5 Sonnet (Latest) | 200K tokens |
| claude-3-5-haiku-20241022 | Claude 3.5 Haiku | 200K tokens |
| claude-3-opus-20240229 | Claude 3 Opus | 200K tokens |
| claude-3-sonnet-20240229 | Claude 3 Sonnet | 200K tokens |
| claude-3-haiku-20240307 | Claude 3 Haiku | 200K tokens |
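If you want to confirm which of these models your AgentRouter instance actually exposes, the SDK's model-listing call can act as a quick probe. This is a sketch under one assumption that may not hold for every deployment: that AgentRouter proxies Anthropic's /v1/models endpoint. It reuses the client object from the Quick Start above.
# Optional probe -- assumes the /v1/models endpoint is proxied by your instance.
for model in client.models.list():
    print(model.id)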
Basic Messages
Python Example
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[
        {"role": "user", "content": "Explain quantum computing in simple terms"}
    ]
)
print(message.content[0].text)
TypeScript Example
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [
    { role: 'user', content: 'Explain quantum computing in simple terms' }
  ]
});
console.log(message.content[0].text);
System Prompts
Python System Prompt
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    system="You are a professional Python programming expert. Provide clear, concise, and accurate code examples.",
    messages=[
        {"role": "user", "content": "How do I read a CSV file in Python?"}
    ]
)
print(message.content[0].text)
TypeScript System Prompt
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  system: 'You are a professional Python programming expert. Provide clear, concise, and accurate code examples.',
  messages: [
    { role: 'user', content: 'How do I read a CSV file in Python?' }
  ]
});
console.log(message.content[0].text);
Multi-Turn Conversations
Python Conversation
messages = [
    {"role": "user", "content": "My name is Bob"},
    {"role": "assistant", "content": "Hello Bob! It's nice to meet you. How can I help you today?"},
    {"role": "user", "content": "What's my name?"}
]
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=messages
)
print(message.content[0].text)  # "Your name is Bob"
TypeScript Conversation
const messages: Anthropic.MessageParam[] = [
  { role: 'user', content: 'My name is Bob' },
  { role: 'assistant', content: "Hello Bob! It's nice to meet you. How can I help you today?" },
  { role: 'user', content: "What's my name?" }
];
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: messages
});
console.log(message.content[0].text); // "Your name is Bob"
Streaming Responses
Python Streaming
with client.messages.stream(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Write a short story about a robot"}]
) as stream:
    for text in stream.text_stream:
        print(text, end="", flush=True)
TypeScript Streaming
const stream = await client.messages.stream({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Write a short story about a robot' }]
});
for await (const event of stream) {
  if (event.type === 'content_block_delta' &&
      event.delta.type === 'text_delta') {
    process.stdout.write(event.delta.text);
  }
}
Alternative Streaming Pattern (TypeScript)
const stream = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Write a short story about a robot' }],
  stream: true
});
for await (const chunk of stream) {
  if (chunk.type === 'content_block_delta' &&
      chunk.delta.type === 'text_delta') {
    process.stdout.write(chunk.delta.text);
  }
}
Vision Capabilities
Claude 3 models support image inputs for visual understanding.
Python Vision
import base64
# Read and encode image
with open("diagram.jpg", "rb") as image_file:
image_data = base64.b64encode(image_file.read()).decode('utf-8')
message = client.messages.create(
model="claude-3-5-sonnet-20241022",
max_tokens=1024,
messages=[
{
"role": "user",
"content": [
{
"type": "image",
"source": {
"type": "base64",
"media_type": "image/jpeg",
"data": image_data
}
},
{
"type": "text",
"text": "What does this diagram show? Please explain in detail."
}
]
}
]
)
print(message.content[0].text)TypeScript Vision
import * as fs from 'fs';
// Read and encode image
const imageBuffer = fs.readFileSync('diagram.jpg');
const imageData = imageBuffer.toString('base64');
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [
    {
      role: 'user',
      content: [
        {
          type: 'image',
          source: {
            type: 'base64',
            media_type: 'image/jpeg',
            data: imageData
          }
        },
        {
          type: 'text',
          text: 'What does this diagram show? Please explain in detail.'
        }
      ]
    }
  ]
});
console.log(message.content[0].text);
Tool Use (Function Calling)
Claude supports sophisticated tool use for function calling.
Python Tool Use
tools = [
    {
        "name": "get_stock_price",
        "description": "Get the current stock price for a given ticker symbol",
        "input_schema": {
            "type": "object",
            "properties": {
                "ticker": {
                    "type": "string",
                    "description": "The stock ticker symbol, e.g., AAPL for Apple"
                },
                "exchange": {
                    "type": "string",
                    "enum": ["NYSE", "NASDAQ", "LSE"],
                    "description": "The stock exchange"
                }
            },
            "required": ["ticker"]
        }
    }
]
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    tools=tools,
    messages=[
        {"role": "user", "content": "What's the current price of Apple stock?"}
    ]
)
# Check for tool use
for content in message.content:
    if content.type == "tool_use":
        print(f"Tool: {content.name}")
        print(f"Input: {content.input}")
        # Execute the tool (mock example)
        tool_result = {"price": 178.25, "currency": "USD"}
        # Send tool result back to Claude
        response = client.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1024,
            tools=tools,
            messages=[
                {"role": "user", "content": "What's the current price of Apple stock?"},
                {"role": "assistant", "content": message.content},
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "tool_result",
                            "tool_use_id": content.id,
                            "content": str(tool_result)
                        }
                    ]
                }
            ]
        )
        print(response.content[0].text)
TypeScript Tool Use
const tools: Anthropic.Tool[] = [
  {
    name: 'get_stock_price',
    description: 'Get the current stock price for a given ticker symbol',
    input_schema: {
      type: 'object',
      properties: {
        ticker: {
          type: 'string',
          description: 'The stock ticker symbol, e.g., AAPL for Apple'
        },
        exchange: {
          type: 'string',
          enum: ['NYSE', 'NASDAQ', 'LSE'],
          description: 'The stock exchange'
        }
      },
      required: ['ticker']
    }
  }
];
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  tools: tools,
  messages: [
    { role: 'user', content: "What's the current price of Apple stock?" }
  ]
});
// Check for tool use
for (const content of message.content) {
  if (content.type === 'tool_use') {
    console.log(`Tool: ${content.name}`);
    console.log(`Input: ${JSON.stringify(content.input)}`);
    // Execute the tool (mock example)
    const toolResult = { price: 178.25, currency: 'USD' };
    // Send tool result back to Claude
    const response = await client.messages.create({
      model: 'claude-3-5-sonnet-20241022',
      max_tokens: 1024,
      tools: tools,
      messages: [
        { role: 'user', content: "What's the current price of Apple stock?" },
        { role: 'assistant', content: message.content },
        {
          role: 'user',
          content: [
            {
              type: 'tool_result',
              tool_use_id: content.id,
              content: JSON.stringify(toolResult)
            }
          ]
        }
      ]
    });
    console.log(response.content[0].text);
  }
}
Advanced Parameters
Temperature and Sampling
# Python
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    temperature=0.7,  # 0-1, higher = more creative
    top_p=0.9,  # Nucleus sampling
    top_k=50,  # Top-K sampling
    messages=[
        {"role": "user", "content": "Write a creative poem about the ocean"}
    ]
)
// TypeScript
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  temperature: 0.7,  // 0-1, higher = more creative
  top_p: 0.9,  // Nucleus sampling
  top_k: 50,  // Top-K sampling
  messages: [
    { role: 'user', content: 'Write a creative poem about the ocean' }
  ]
});
Error Handling
Python Error Handling
from anthropic import (
    APIError,
    APIConnectionError,
    RateLimitError,
    AuthenticationError,
    BadRequestError
)
try:
    message = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello!"}]
    )
except AuthenticationError:
    print("Authentication failed - check your API key")
except RateLimitError as e:
    print(f"Rate limit exceeded - retry after {e.response.headers.get('retry-after')}s")
except BadRequestError as e:
    print(f"Bad request - check parameters: {e.message}")
except APIConnectionError:
    print("Network connection error - check your internet connection")
except APIError as e:
    print(f"API error: {e.status_code} - {e.message}")
TypeScript Error Handling
import Anthropic from '@anthropic-ai/sdk';
try {
  const message = await client.messages.create({
    model: 'claude-3-5-sonnet-20241022',
    max_tokens: 1024,
    messages: [{ role: 'user', content: 'Hello!' }]
  });
} catch (error) {
  if (error instanceof Anthropic.APIError) {
    console.error(`API Error: ${error.status} - ${error.message}`);
    if (error.status === 401) {
      console.error('Authentication failed - check your API key');
    } else if (error.status === 402) {
      console.error('Insufficient wallet balance');
    } else if (error.status === 429) {
      console.error('Rate limit exceeded');
    } else if (error.status === 400) {
      console.error('Bad request - check your parameters');
    }
  } else {
    console.error('Unexpected error:', error);
  }
}
Best Practices
Environment Variables
Python:
import os
from anthropic import Anthropic
client = Anthropic(
    api_key=os.getenv("AGENTROUTER_API_KEY"),
    base_url=os.getenv("AGENTROUTER_BASE_URL", "https://your-agentrouter.com")
)
TypeScript:
import Anthropic from '@anthropic-ai/sdk';
const client = new Anthropic({
  apiKey: process.env.AGENTROUTER_API_KEY,
  baseURL: process.env.AGENTROUTER_BASE_URL || 'https://your-agentrouter.com'
});
Retry Logic
Python:
import time
from anthropic import APIError
def create_message_with_retry(client, max_retries=3, **kwargs):
    """Create message with exponential backoff retry."""
    for attempt in range(max_retries):
        try:
            return client.messages.create(**kwargs)
        except APIError as e:
            if attempt == max_retries - 1:
                raise
            wait_time = 2 ** attempt  # Exponential backoff
            print(f"Error: {e}, retrying in {wait_time}s...")
            time.sleep(wait_time)
# Usage
message = create_message_with_retry(
    client,
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)
TypeScript:
async function createMessageWithRetry(
  client: Anthropic,
  params: Anthropic.MessageCreateParamsNonStreaming,
  maxRetries = 3
): Promise<Anthropic.Message> {
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await client.messages.create(params);
    } catch (error) {
      if (attempt === maxRetries - 1) throw error;
      const waitTime = Math.pow(2, attempt) * 1000; // Exponential backoff
      console.log(`Error: ${error}, retrying in ${waitTime}ms...`);
      await new Promise(resolve => setTimeout(resolve, waitTime));
    }
  }
  throw new Error('Max retries exceeded');
}
// Usage
const message = await createMessageWithRetry(client, {
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello!' }]
});
Token Usage Tracking
Python:
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
message = client.messages.create(
    model="claude-3-5-sonnet-20241022",
    max_tokens=1024,
    messages=[{"role": "user", "content": "Hello!"}]
)
logger.info(f"Tokens - Input: {message.usage.input_tokens}, "
            f"Output: {message.usage.output_tokens}")
TypeScript:
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 1024,
  messages: [{ role: 'user', content: 'Hello!' }]
});
console.log(`Tokens - Input: ${message.usage.input_tokens}, ` +
            `Output: ${message.usage.output_tokens}`);
Long Document Processing
Claude's 200K context window allows processing of large documents:
Python:
# Read a long document
with open("long_document.txt", "r", encoding="utf-8") as f:
document = f.read()
message = client.messages.create(
model="claude-3-5-sonnet-20241022",
max_tokens=2048,
messages=[
{
"role": "user",
"content": f"Please summarize the following document:\n\n{document}"
}
]
)
print(message.content[0].text)TypeScript:
import * as fs from 'fs';
// Read a long document
const document = fs.readFileSync('long_document.txt', 'utf-8');
const message = await client.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 2048,
  messages: [
    {
      role: 'user',
      content: `Please summarize the following document:\n\n${document}`
    }
  ]
});
console.log(message.content[0].text);
Common Error Codes
| Status Code | Description | Solution |
|---|---|---|
| 400 | Bad Request | Check that max_tokens is provided and parameters are valid |
| 401 | Unauthorized | Verify API key format (must start with sk-ar-) |
| 402 | Payment Required | Insufficient wallet balance - top up your wallet |
| 429 | Rate Limit Exceeded | Wait for rate limit reset or adjust request frequency |
| 500 | Internal Server Error | Upstream provider issue - retry later |
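The table above maps directly onto the SDK's exception hierarchy. The helper below is a sketch rather than an official utility: it relies on the anthropic package's APIStatusError (with status_code, response, and message) and APIConnectionError, and simply echoes the recommended actions from the table.
from anthropic import APIConnectionError, APIStatusError
def handle_request(client, **kwargs):
    """Sketch: translate AgentRouter status codes into the actions listed above."""
    try:
        return client.messages.create(**kwargs)
    except APIConnectionError:
        print("Connection error - check network access to your AgentRouter instance")
    except APIStatusError as e:
        if e.status_code == 400:
            print("Bad request - check that max_tokens is set and parameters are valid")
        elif e.status_code == 401:
            print("Unauthorized - verify your API key starts with sk-ar-")
        elif e.status_code == 402:
            print("Payment required - top up your AgentRouter wallet")
        elif e.status_code == 429:
            print(f"Rate limited - retry after {e.response.headers.get('retry-after')}s")
        elif e.status_code >= 500:
            print("Upstream provider issue - retry later")
        else:
            print(f"API error {e.status_code}: {e.message}")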
Complete Example Applications
Python Document Analyzer
#!/usr/bin/env python3
import os
from anthropic import Anthropic
def analyze_document(file_path: str):
    """Analyze a document using Claude."""
    client = Anthropic(
        api_key=os.getenv("AGENTROUTER_API_KEY"),
        base_url=os.getenv("AGENTROUTER_BASE_URL", "https://your-agentrouter.com")
    )
    # Read document
    with open(file_path, "r", encoding="utf-8") as f:
        content = f.read()
    # Analyze
    message = client.messages.create(
        model="claude-3-5-sonnet-20241022",
        max_tokens=2048,
        system="You are a professional document analyst. Provide detailed, structured analysis.",
        messages=[
            {
                "role": "user",
                "content": f"Please analyze this document and provide:\n"
                           f"1. Main topics\n"
                           f"2. Key insights\n"
                           f"3. Recommendations\n\n"
                           f"Document:\n{content}"
            }
        ]
    )
    print(message.content[0].text)
    print(f"\nTokens used: {message.usage.input_tokens + message.usage.output_tokens}")
if __name__ == "__main__":
    import sys
    if len(sys.argv) < 2:
        print("Usage: python analyze.py <file_path>")
        sys.exit(1)
    analyze_document(sys.argv[1])
TypeScript Interactive Assistant
import Anthropic from '@anthropic-ai/sdk';
import * as readline from 'readline';
const client = new Anthropic({
  apiKey: process.env.AGENTROUTER_API_KEY!,
  baseURL: process.env.AGENTROUTER_BASE_URL || 'https://your-agentrouter.com'
});
async function main() {
  const messages: Anthropic.MessageParam[] = [];
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout
  });
  console.log("Claude Assistant started! Type 'quit' to exit.\n");
  const askQuestion = (query: string): Promise<string> => {
    return new Promise(resolve => rl.question(query, resolve));
  };
  while (true) {
    const userInput = await askQuestion('You: ');
    if (userInput.toLowerCase() === 'quit') {
      break;
    }
    if (!userInput.trim()) {
      continue;
    }
    messages.push({ role: 'user', content: userInput });
    try {
      const message = await client.messages.create({
        model: 'claude-3-5-sonnet-20241022',
        max_tokens: 1024,
        system: 'You are a helpful, friendly assistant.',
        messages: messages
      });
      // message.content is a union of block types; keep only the text blocks.
      const assistantText = message.content
        .map(block => (block.type === 'text' ? block.text : ''))
        .join('');
      messages.push({ role: 'assistant', content: assistantText });
      console.log(`\nClaude: ${assistantText}\n`);
      console.log(`(Tokens: ${message.usage.input_tokens + message.usage.output_tokens})\n`);
    } catch (error) {
      console.error(`Error: ${error}\n`);
    }
  }
  rl.close();
}
main();