Anthropic API
API 配置
认证
// Anthropic client setup. The API key is read from the ANTHROPIC_API_KEY
// environment variable (never hard-code it).
// Fix: the import and the `const anthropic` declaration were duplicated,
// which is an illegal redeclaration in a TypeScript module.
import Anthropic from '@anthropic-ai/sdk';

const anthropic = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
});

// Basic (non-streaming) message request.
// NOTE(review): `systemPrompt` is assumed to be defined elsewhere in the file.
const response = await anthropic.messages.create({
  model: 'claude-3-5-sonnet-20241022',
  max_tokens: 8192,
  system: systemPrompt,
  messages: [
    { role: 'user', content: 'Hello, Claude!' }
  ],
});
// Print the text of the first content block of the previous response.
// NOTE(review): content[0] may be a tool_use block with no `.text` —
// confirm the response is text-only before indexing like this.
// Then start a streaming request (`systemPrompt` / `messages` are assumed
// to be defined elsewhere); the stream is consumed by the loop below.
console.log(response.content[0].text);const stream = await anthropic.messages.stream({
model: 'claude-3-5-sonnet-20241022',
max_tokens: 8192,
system: systemPrompt,
messages: messages,
});
for await (const event of stream) {
if (event.type === 'content_block_delta') {
process.stdout.write(event.delta.text);
}
}const response = await anthropic.messages.create({
model: 'claude-3-5-sonnet-20241022',
max_tokens: 8192,
system: systemPrompt,
messages: messages,
tools: [
{
name: 'readFile',
description: 'Read a file from the filesystem',
input_schema: {
type: 'object',
properties: {
path: { type: 'string', description: 'File path' }
},
required: ['path']
}
}
],
});const MODELS = {
'claude-3-5-sonnet-20241022': {
contextWindow: 200000,
maxOutput: 8192,
cost: { input: 3, output: 15 }, // per million tokens
},
'claude-3-opus-20240229': {
contextWindow: 200000,
maxOutput: 4096,
cost: { input: 15, output: 75 },
},
'claude-3-haiku-20240307': {
contextWindow: 200000,
maxOutput: 4096,
cost: { input: 0.25, output: 1.25 },
},
};{
"model": "claude-3-5-sonnet-20241022",
"temperature": 0.7,
"maxTokens": 8192
}const response = await anthropic.messages.create({
model: 'claude-3-5-sonnet-20241022',
system: [
{
type: 'text',
text: STATIC_PROMPT,
cache_control: { type: 'ephemeral' }, // 缓存这部分
},
{
type: 'text',
text: dynamicPrompt,
},
],
messages: messages,
});interface CacheStats {
cacheCreationInputTokens: number;
cacheReadInputTokens: number;
inputTokens: number;
outputTokens: number;
}
// Report the cache hit rate from the last response's usage block.
// NOTE(review): the SDK reports usage fields in snake_case
// (`cache_read_input_tokens`, `input_tokens`) — confirm these camelCase
// accessors match however `response.usage` is produced here.
const stats = response.usage;
// Error handling: classify API errors by HTTP status.
// NOTE(review): `{...}` is a pseudo-code placeholder, and `sleep` /
// `retry` are assumed to be defined elsewhere in the file.
console.log(`Cache hit rate: ${stats.cacheReadInputTokens / stats.inputTokens * 100}%`);try {
const response = await anthropic.messages.create({...});
} catch (error) {
if (error instanceof Anthropic.APIError) {
console.error('API Error:', error.status, error.message);
if (error.status === 429) {
// Rate limited: back off briefly, then retry.
await sleep(1000);
return retry();
} else if (error.status === 500) {
// Server error: retry immediately.
return retry();
}
}
// Generic retry wrapper: re-invokes `fn` up to `maxRetries` times with
// exponential backoff (1s, 2s, 4s, ...); the last failure is rethrown.
}async function callWithRetry(
fn: () => Promise<any>,
maxRetries: number = 3
): Promise<any> {
for (let i = 0; i < maxRetries; i++) {
try {
return await fn();
} catch (error) {
if (i === maxRetries - 1) throw error;
// Exponential backoff before the next attempt.
await sleep(1000 * Math.pow(2, i));
}
}
// Simple concurrency limiter: at most `maxConcurrent` tasks in flight,
// enforced by polling every 100 ms.
// NOTE(review): `queue` is declared but never used, and the while-loop
// is a busy-wait — consider a promise-based queue instead.
}class RateLimiter {
private queue: Array<() => Promise<any>> = [];
private running = 0;
private maxConcurrent = 5;
async execute<T>(fn: () => Promise<T>): Promise<T> {
while (this.running >= this.maxConcurrent) {
await sleep(100);
}
this.running++;
try {
return await fn();
} finally {
this.running--;
}
}
// Token estimator for budget checks below.
}function estimateTokens(text: string): number {
// Rough estimate: 1 token ≈ 4 characters.
return Math.ceil(text.length / 4);
}
/**
 * Estimate the total token footprint of a message list by serializing
 * each message to JSON and applying the rough chars/4 heuristic
 * from estimateTokens.
 */
function estimateMessageTokens(messages: Message[]): number {
  return messages.reduce(
    (sum, message) => sum + estimateTokens(JSON.stringify(message)),
    0,
  );
}

// How the 200k-token context window is apportioned between sections.
const TOKEN_BUDGET = {
  total: 200000,
  systemPrompt: 20000,
  conversation: 150000,
  toolResults: 20000,
  reserved: 10000,
};
/**
 * True when the estimated size of `messages` still fits the context
 * window after holding back the reserved headroom.
 */
function checkTokenBudget(messages: Message[]): boolean {
  const ceiling = TOKEN_BUDGET.total - TOKEN_BUDGET.reserved;
  return estimateMessageTokens(messages) < ceiling;
}

// Token usage of a single request/response pair.
interface UsageStats {
  inputTokens: number;
  outputTokens: number;
  cacheReadTokens: number;
  cacheCreationTokens: number;
}
/**
 * Cost of one request in USD. Pricing is per million tokens (see MODELS).
 * Cache reads are billed at 10% of the input rate; cache creation
 * (writes) at 125% of the input rate.
 *
 * Fixes: the original ignored `cacheCreationTokens` entirely, and an
 * unknown model name produced an opaque TypeError.
 */
function calculateCost(usage: UsageStats, model: string): number {
  const entry = MODELS[model];
  if (!entry) {
    throw new Error(`Unknown model: ${model}`);
  }
  const pricing = entry.cost;
  const inputCost = usage.inputTokens * pricing.input / 1_000_000;
  const outputCost = usage.outputTokens * pricing.output / 1_000_000;
  // Cache reads are discounted to 10% of the normal input price.
  const cacheReadCost = usage.cacheReadTokens * pricing.input * 0.1 / 1_000_000;
  // Cache writes carry a 25% surcharge over the normal input price.
  const cacheWriteCost = usage.cacheCreationTokens * pricing.input * 1.25 / 1_000_000;
  return inputCost + outputCost + cacheReadCost + cacheWriteCost;
}

/** Accumulates API spend and request counts across a session. */
class CostTracker {
  private totalCost = 0;
  private requests = 0;

  /** Record the cost of one completed request. */
  track(usage: UsageStats, model: string): void {
    this.totalCost += calculateCost(usage, model);
    this.requests++;
  }

  /** Totals so far. Fix: avgCost is 0 (not NaN) before the first request. */
  getStats() {
    return {
      totalCost: this.totalCost,
      requests: this.requests,
      avgCost: this.requests > 0 ? this.totalCost / this.requests : 0,
    };
  }
}

// Cacheable system-prompt layout: static parts first (each marked
// ephemeral), dynamic context last so the cached prefix stays stable.
// Fix: this was a bare `system: [...]` fragment (a syntax error at top
// level); it is now bound to a named const.
const CACHED_SYSTEM = {
  system: [
    { type: 'text', text: STATIC_RULES, cache_control: { type: 'ephemeral' } },
    { type: 'text', text: STATIC_TOOLS, cache_control: { type: 'ephemeral' } },
    { type: 'text', text: dynamicContext },
  ],
};

// ❌ Many separate API calls (one per file).
for (const file of files) {
  await analyzeFile(file);
}
// ✅ Batch processing: one call covering all files.
// NOTE(review): `{...}` below is a pseudo-code placeholder, and this
// `const stream` re-declares the earlier one — only one snippet should
// be active in a real module.
await analyzeFiles(files);const stream = anthropic.messages.stream({...});
// Forward each streaming event to the UI as it arrives.
for await (const event of stream) {
// Display the response in real time.
updateUI(event);
// Retry pattern: only retryable failures are retried; everything else
// propagates. NOTE(review): `{...}` is a pseudo-code placeholder, and
// `isRetryable` / `retry` are assumed to be defined elsewhere.
}try {
return await anthropic.messages.create({...});
} catch (error) {
if (isRetryable(error)) {
return await retry();
}
throw error;
}